setdiscovery: randomly pick between heads and sample when taking full sample...
Pierre-Yves David
r23810:b681d3a2 default
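The behavioral change is confined to the tail of _takefullsample (the hunk around old lines 116-119 below): the mandatory head set `always` is now folded into the sample before truncation, so when there are more candidates than the size budget, _limitsample picks uniformly among heads and exponentially spaced nodes instead of reserving a slot for every head. A standalone before/after sketch of just that tail (helper inlined; an illustration of the hunk, not the full function):

    import random

    def _limitsample(sample, desiredlen):
        # return a random subset of sample of at most desiredlen items
        if len(sample) > desiredlen:
            sample = set(random.sample(list(sample), desiredlen))
        return sample

    def tail_old(nodes, sample, always, size):
        # before: every head in `always` was guaranteed to survive
        desiredlen = size - len(always)
        sample = _limitsample(sample, desiredlen)
        if len(sample) < desiredlen:
            more = desiredlen - len(sample)
            sample.update(random.sample(list(nodes - sample - always), more))
        sample.update(always)
        return sample

    def tail_new(nodes, sample, always, size):
        # after: heads compete with everything else for the size budget
        sample.update(always)
        sample = _limitsample(sample, size)
        if len(sample) < size:
            more = size - len(sample)
            sample.update(random.sample(list(nodes - sample), more))
        return sample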
@@ -1,257 +1,257 @@
1 1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 """
9 9 The algorithm works in the following way. You have two repositories:
10 10 local and remote. They both contain a DAG of changelists.
11 11
12 12 The goal of the discovery protocol is to find one set of nodes, *common*,
13 13 the set of nodes shared by local and remote.
14 14
15 15 One of the issues with the original protocol was latency: it could
16 16 potentially require lots of roundtrips to discover that the local repo was a
17 17 subset of remote (a very common case; you usually have few changes
18 18 compared to upstream, while upstream probably has lots of development).
19 19
20 20 The new protocol only requires one interface for the remote repo: `known()`,
21 21 which, given a set of changelists, tells you if they are present in the DAG.
22 22
23 23 The algorithm then works as follows:
24 24
25 25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 26 all nodes are in `unknown`.
27 27 - Take a sample from `unknown`, call `remote.known(sample)`
28 28 - For each node that remote knows, move it and all its ancestors to `common`
29 29 - For each node that remote doesn't know, move it and all its descendants
30 30 to `missing`
31 31 - Iterate until `unknown` is empty
32 32
33 33 There are a couple of optimizations. First, instead of starting with a random
34 34 sample of missing, start by sending all heads; in the case where the local
35 35 repo is a subset, you have computed the answer in one round trip.
36 36
37 37 Then you can do something similar to the bisecting strategy used when
38 38 finding faulty changesets. Instead of random samples, you can try picking
39 39 nodes that will maximize the number of nodes that will be
40 40 classified with them (since all ancestors or descendants will be marked as well).
41 41 """
42 42
43 43 from node import nullid, nullrev
44 44 from i18n import _
45 45 import random
46 46 import util, dagutil
47 47
48 48 def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
49 49 """update an existing sample to match the expected size
50 50
51 51 The sample is updated with nodes exponentially distant from each head of the
52 52 <nodes> set. (H~1, H~2, H~4, H~8, etc).
53 53
54 54 If a target size is specified, the sampling will stop once this size is
55 55 reached. Otherwise sampling will happen until roots of the <nodes> set are
56 56 reached.
57 57
58 58 :dag: a dag object from dagutil
59 59 :nodes: set of nodes we want to discover (if None, assume the whole dag)
60 60 :sample: a sample to update
61 61 :always: set of notable nodes that will be part of the sample anyway
62 62 :quicksamplesize: optional target size of the sample"""
63 63 # if nodes is empty we scan the entire graph
64 64 if nodes:
65 65 heads = dag.headsetofconnecteds(nodes)
66 66 else:
67 67 heads = dag.heads()
68 68 dist = {}
69 69 visit = util.deque(heads)
70 70 seen = set()
71 71 factor = 1
72 72 while visit:
73 73 curr = visit.popleft()
74 74 if curr in seen:
75 75 continue
76 76 d = dist.setdefault(curr, 1)
77 77 if d > factor:
78 78 factor *= 2
79 79 if d == factor:
80 80 if curr not in always: # need this check for the early exit below
81 81 sample.add(curr)
82 82 if quicksamplesize and (len(sample) >= quicksamplesize):
83 83 return
84 84 seen.add(curr)
85 85 for p in dag.parents(curr):
86 86 if not nodes or p in nodes:
87 87 dist.setdefault(p, d + 1)
88 88 visit.append(p)
89 89
90 90 def _setupsample(dag, nodes, size):
91 91 always = dag.headsetofconnecteds(nodes)
92 92 desiredlen = size - len(always)
93 93 if desiredlen <= 0:
94 94 # This could be bad if there are very many heads, all unknown to the
95 95 # server. We're counting on long request support here.
96 96 return always, None, desiredlen
97 97 return always, set(), desiredlen
98 98
99 99 def _takequicksample(dag, nodes, size):
100 100 always, sample, desiredlen = _setupsample(dag, nodes, size)
101 101 if sample is None:
102 102 return always
103 103 _updatesample(dag, None, sample, always, quicksamplesize=desiredlen)
104 104 sample.update(always)
105 105 return sample
106 106
107 107 def _takefullsample(dag, nodes, size):
108 108 always, sample, desiredlen = _setupsample(dag, nodes, size)
109 109 if sample is None:
110 110 return always
111 111 # update from heads
112 112 _updatesample(dag, nodes, sample, always)
113 113 # update from roots
114 114 _updatesample(dag.inverse(), nodes, sample, always)
115 115 assert sample
116 sample = _limitsample(sample, desiredlen)
117 if len(sample) < desiredlen:
118 more = desiredlen - len(sample)
119 sample.update(random.sample(list(nodes - sample - always), more))
120 116 sample.update(always)
117 sample = _limitsample(sample, size)
118 if len(sample) < size:
119 more = size - len(sample)
120 sample.update(random.sample(list(nodes - sample), more))
121 121 return sample
122 122
123 123 def _limitsample(sample, desiredlen):
124 124 """return a random subset of sample of at most desiredlen item"""
125 125 if len(sample) > desiredlen:
126 126 sample = set(random.sample(sample, desiredlen))
127 127 return sample
128 128
129 129 def findcommonheads(ui, local, remote,
130 130 initialsamplesize=100,
131 131 fullsamplesize=200,
132 132 abortwhenunrelated=True):
133 133 '''Return a tuple (common, anyincoming, remoteheads) used to identify
134 134 missing nodes from or in remote.
135 135 '''
136 136 roundtrips = 0
137 137 cl = local.changelog
138 138 dag = dagutil.revlogdag(cl)
139 139
140 140 # early exit if we know all the specified remote heads already
141 141 ui.debug("query 1; heads\n")
142 142 roundtrips += 1
143 143 ownheads = dag.heads()
144 144 sample = _limitsample(ownheads, initialsamplesize)
145 145 # indices between sample and externalized version must match
146 146 sample = list(sample)
147 147 if remote.local():
148 148 # stopgap until we have a proper localpeer that supports batch()
149 149 srvheadhashes = remote.heads()
150 150 yesno = remote.known(dag.externalizeall(sample))
151 151 elif remote.capable('batch'):
152 152 batch = remote.batch()
153 153 srvheadhashesref = batch.heads()
154 154 yesnoref = batch.known(dag.externalizeall(sample))
155 155 batch.submit()
156 156 srvheadhashes = srvheadhashesref.value
157 157 yesno = yesnoref.value
158 158 else:
159 159 # compatibility with pre-batch, but post-known remotes during 1.9
160 160 # development
161 161 srvheadhashes = remote.heads()
162 162 sample = []
163 163
164 164 if cl.tip() == nullid:
165 165 if srvheadhashes != [nullid]:
166 166 return [nullid], True, srvheadhashes
167 167 return [nullid], False, []
168 168
169 169 # start actual discovery (we note this before the next "if" for
170 170 # compatibility reasons)
171 171 ui.status(_("searching for changes\n"))
172 172
173 173 srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
174 174 if len(srvheads) == len(srvheadhashes):
175 175 ui.debug("all remote heads known locally\n")
176 176 return (srvheadhashes, False, srvheadhashes,)
177 177
178 178 if sample and len(ownheads) <= initialsamplesize and util.all(yesno):
179 179 ui.note(_("all local heads known remotely\n"))
180 180 ownheadhashes = dag.externalizeall(ownheads)
181 181 return (ownheadhashes, True, srvheadhashes,)
182 182
183 183 # full blown discovery
184 184
185 185 # own nodes I know we both know
186 186 # treat remote heads (and maybe own heads) as a first implicit sample
187 187 # response
188 188 common = cl.incrementalmissingrevs(srvheads)
189 189 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
190 190 common.addbases(commoninsample)
191 191 # own nodes where I don't know if remote knows them
192 192 undecided = set(common.missingancestors(ownheads))
193 193 # own nodes I know remote lacks
194 194 missing = set()
195 195
196 196 full = False
197 197 while undecided:
198 198
199 199 if sample:
200 200 missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
201 201 missing.update(dag.descendantset(missinginsample, missing))
202 202
203 203 undecided.difference_update(missing)
204 204
205 205 if not undecided:
206 206 break
207 207
208 208 if full or common.hasbases():
209 209 if full:
210 210 ui.note(_("sampling from both directions\n"))
211 211 else:
212 212 ui.debug("taking initial sample\n")
213 213 samplefunc = _takefullsample
214 214 targetsize = fullsamplesize
215 215 else:
216 216 # use even cheaper initial sample
217 217 ui.debug("taking quick initial sample\n")
218 218 samplefunc = _takequicksample
219 219 targetsize = initialsamplesize
220 220 if len(undecided) < targetsize:
221 221 sample = list(undecided)
222 222 else:
223 223 sample = samplefunc(dag, undecided, targetsize)
224 224 sample = _limitsample(sample, targetsize)
225 225
226 226 roundtrips += 1
227 227 ui.progress(_('searching'), roundtrips, unit=_('queries'))
228 228 ui.debug("query %i; still undecided: %i, sample size is: %i\n"
229 229 % (roundtrips, len(undecided), len(sample)))
230 230 # indices between sample and externalized version must match
231 231 sample = list(sample)
232 232 yesno = remote.known(dag.externalizeall(sample))
233 233 full = True
234 234
235 235 if sample:
236 236 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
237 237 common.addbases(commoninsample)
238 238 common.removeancestorsfrom(undecided)
239 239
240 240 # heads(common) == heads(common.bases) since common represents common.bases
241 241 # and all its ancestors
242 242 result = dag.headsetofconnecteds(common.bases)
243 243 # common.bases can include nullrev, but our contract requires us to not
244 244 # return any heads in that case, so discard that
245 245 result.discard(nullrev)
246 246 ui.progress(_('searching'), None)
247 247 ui.debug("%d total queries\n" % roundtrips)
248 248
249 249 if not result and srvheadhashes != [nullid]:
250 250 if abortwhenunrelated:
251 251 raise util.Abort(_("repository is unrelated"))
252 252 else:
253 253 ui.warn(_("warning: repository is unrelated\n"))
254 254 return (set([nullid]), True, srvheadhashes,)
255 255
256 256 anyincoming = (srvheadhashes != [nullid])
257 257 return dag.externalizeall(result), anyincoming, srvheadhashes
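Stripped of the head-first shortcut, the quick/full sample machinery, and the incremental ancestor bookkeeping, the classification loop from the module docstring boils down to the toy below. The ancestors/descendants closures and the remote_known oracle are hypothetical stand-ins for dagutil and the peer's known() call:

    import random

    def discover(allnodes, ancestors, descendants, remote_known,
                 samplesize=200):
        # ancestors(n) and descendants(n) are assumed to return closures
        # that include n itself; remote_known(nodes) plays the wire
        # protocol's known() and returns one boolean per queried node.
        common, missing = set(), set()
        unknown = set(allnodes)
        roundtrips = 0
        while unknown:
            sample = random.sample(list(unknown),
                                   min(samplesize, len(unknown)))
            roundtrips += 1
            for node, known in zip(sample, remote_known(sample)):
                if known:
                    # remote has it, hence all of its ancestors too
                    common |= ancestors(node)
                else:
                    # remote lacks it, hence all of its descendants too
                    missing |= descendants(node)
            unknown -= common | missing
        return common, missing, roundtrips

The real findcommonheads replaces the uniform sample with _takequicksample or _takefullsample and lets incrementalmissingrevs shrink the undecided set without materializing full closures.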
@@ -1,409 +1,409 @@
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug
18 18 > echo
19 19 > echo "% -- b -> a tree"
20 20 > hg -R b debugdiscovery a --verbose --old
21 21 > echo
22 22 > echo "% -- b -> a set"
23 23 > hg -R b debugdiscovery a --verbose --debug
24 24 > cd ..
25 25 > }
26 26
27 27
28 28 Small superset:
29 29
30 30 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
31 31 > +2:f +1:a1:b1
32 32 > <f +4 :a2
33 33 > +5 :b2
34 34 > <f +3 :b3'
35 35
36 36 % -- a -> b tree
37 37 comparing with b
38 38 searching for changes
39 39 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
40 40 common heads: 01241442b3c2 b5714e113bc0
41 41 local is subset
42 42
43 43 % -- a -> b set
44 44 comparing with b
45 45 query 1; heads
46 46 searching for changes
47 47 all local heads known remotely
48 48 common heads: 01241442b3c2 b5714e113bc0
49 49 local is subset
50 50
51 51 % -- b -> a tree
52 52 comparing with a
53 53 searching for changes
54 54 unpruned common: 01241442b3c2 b5714e113bc0
55 55 common heads: 01241442b3c2 b5714e113bc0
56 56 remote is subset
57 57
58 58 % -- b -> a set
59 59 comparing with a
60 60 query 1; heads
61 61 searching for changes
62 62 all remote heads known locally
63 63 common heads: 01241442b3c2 b5714e113bc0
64 64 remote is subset
65 65
66 66
67 67 Many new:
68 68
69 69 $ testdesc '-ra1 -ra2' '-rb' '
70 70 > +2:f +3:a1 +3:b
71 71 > <f +30 :a2'
72 72
73 73 % -- a -> b tree
74 74 comparing with b
75 75 searching for changes
76 76 unpruned common: bebd167eb94d
77 77 common heads: bebd167eb94d
78 78
79 79 % -- a -> b set
80 80 comparing with b
81 81 query 1; heads
82 82 searching for changes
83 83 taking initial sample
84 84 searching: 2 queries
85 85 query 2; still undecided: 29, sample size is: 29
86 86 2 total queries
87 87 common heads: bebd167eb94d
88 88
89 89 % -- b -> a tree
90 90 comparing with a
91 91 searching for changes
92 92 unpruned common: 66f7d451a68b bebd167eb94d
93 93 common heads: bebd167eb94d
94 94
95 95 % -- b -> a set
96 96 comparing with a
97 97 query 1; heads
98 98 searching for changes
99 99 taking initial sample
100 100 searching: 2 queries
101 101 query 2; still undecided: 2, sample size is: 2
102 102 2 total queries
103 103 common heads: bebd167eb94d
104 104
105 105
106 106 Both sides many new with stub:
107 107
108 108 $ testdesc '-ra1 -ra2' '-rb' '
109 109 > +2:f +2:a1 +30 :b
110 110 > <f +30 :a2'
111 111
112 112 % -- a -> b tree
113 113 comparing with b
114 114 searching for changes
115 115 unpruned common: 2dc09a01254d
116 116 common heads: 2dc09a01254d
117 117
118 118 % -- a -> b set
119 119 comparing with b
120 120 query 1; heads
121 121 searching for changes
122 122 taking initial sample
123 123 searching: 2 queries
124 124 query 2; still undecided: 29, sample size is: 29
125 125 2 total queries
126 126 common heads: 2dc09a01254d
127 127
128 128 % -- b -> a tree
129 129 comparing with a
130 130 searching for changes
131 131 unpruned common: 2dc09a01254d 66f7d451a68b
132 132 common heads: 2dc09a01254d
133 133
134 134 % -- b -> a set
135 135 comparing with a
136 136 query 1; heads
137 137 searching for changes
138 138 taking initial sample
139 139 searching: 2 queries
140 140 query 2; still undecided: 29, sample size is: 29
141 141 2 total queries
142 142 common heads: 2dc09a01254d
143 143
144 144
145 145 Both many new:
146 146
147 147 $ testdesc '-ra' '-rb' '
148 148 > +2:f +30 :b
149 149 > <f +30 :a'
150 150
151 151 % -- a -> b tree
152 152 comparing with b
153 153 searching for changes
154 154 unpruned common: 66f7d451a68b
155 155 common heads: 66f7d451a68b
156 156
157 157 % -- a -> b set
158 158 comparing with b
159 159 query 1; heads
160 160 searching for changes
161 161 taking quick initial sample
162 162 searching: 2 queries
163 163 query 2; still undecided: 31, sample size is: 31
164 164 2 total queries
165 165 common heads: 66f7d451a68b
166 166
167 167 % -- b -> a tree
168 168 comparing with a
169 169 searching for changes
170 170 unpruned common: 66f7d451a68b
171 171 common heads: 66f7d451a68b
172 172
173 173 % -- b -> a set
174 174 comparing with a
175 175 query 1; heads
176 176 searching for changes
177 177 taking quick initial sample
178 178 searching: 2 queries
179 179 query 2; still undecided: 31, sample size is: 31
180 180 2 total queries
181 181 common heads: 66f7d451a68b
182 182
183 183
184 184 Both many new skewed:
185 185
186 186 $ testdesc '-ra' '-rb' '
187 187 > +2:f +30 :b
188 188 > <f +50 :a'
189 189
190 190 % -- a -> b tree
191 191 comparing with b
192 192 searching for changes
193 193 unpruned common: 66f7d451a68b
194 194 common heads: 66f7d451a68b
195 195
196 196 % -- a -> b set
197 197 comparing with b
198 198 query 1; heads
199 199 searching for changes
200 200 taking quick initial sample
201 201 searching: 2 queries
202 202 query 2; still undecided: 51, sample size is: 51
203 203 2 total queries
204 204 common heads: 66f7d451a68b
205 205
206 206 % -- b -> a tree
207 207 comparing with a
208 208 searching for changes
209 209 unpruned common: 66f7d451a68b
210 210 common heads: 66f7d451a68b
211 211
212 212 % -- b -> a set
213 213 comparing with a
214 214 query 1; heads
215 215 searching for changes
216 216 taking quick initial sample
217 217 searching: 2 queries
218 218 query 2; still undecided: 31, sample size is: 31
219 219 2 total queries
220 220 common heads: 66f7d451a68b
221 221
222 222
223 223 Both many new on top of long history:
224 224
225 225 $ testdesc '-ra' '-rb' '
226 226 > +1000:f +30 :b
227 227 > <f +50 :a'
228 228
229 229 % -- a -> b tree
230 230 comparing with b
231 231 searching for changes
232 232 unpruned common: 7ead0cba2838
233 233 common heads: 7ead0cba2838
234 234
235 235 % -- a -> b set
236 236 comparing with b
237 237 query 1; heads
238 238 searching for changes
239 239 taking quick initial sample
240 240 searching: 2 queries
241 241 query 2; still undecided: 1049, sample size is: 11
242 242 sampling from both directions
243 243 searching: 3 queries
244 244 query 3; still undecided: 31, sample size is: 31
245 245 3 total queries
246 246 common heads: 7ead0cba2838
247 247
248 248 % -- b -> a tree
249 249 comparing with a
250 250 searching for changes
251 251 unpruned common: 7ead0cba2838
252 252 common heads: 7ead0cba2838
253 253
254 254 % -- b -> a set
255 255 comparing with a
256 256 query 1; heads
257 257 searching for changes
258 258 taking quick initial sample
259 259 searching: 2 queries
260 260 query 2; still undecided: 1029, sample size is: 11
261 261 sampling from both directions
262 262 searching: 3 queries
263 263 query 3; still undecided: 15, sample size is: 15
264 264 3 total queries
265 265 common heads: 7ead0cba2838
266 266
267 267
268 268 One with >200 heads, which used to use up all of the sample:
269 269
270 270 $ hg init manyheads
271 271 $ cd manyheads
272 272 $ echo "+300:r @a" >dagdesc
273 273 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
274 274 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
275 275 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
276 276 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
277 277 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
278 278 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
279 279 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
280 280 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
281 281 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
282 282 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
283 283 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
284 284 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
285 285 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
286 286 $ echo "@b *r+3" >>dagdesc # one more head
287 287 $ hg debugbuilddag <dagdesc
288 288 reading DAG from stdin
289 289
290 290 $ hg heads -t --template . | wc -c
291 291 \s*261 (re)
292 292
293 293 $ hg clone -b a . a
294 294 adding changesets
295 295 adding manifests
296 296 adding file changes
297 297 added 1340 changesets with 0 changes to 0 files (+259 heads)
298 298 updating to branch a
299 299 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
300 300 $ hg clone -b b . b
301 301 adding changesets
302 302 adding manifests
303 303 adding file changes
304 304 added 304 changesets with 0 changes to 0 files
305 305 updating to branch b
306 306 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
307 307
308 308 $ hg -R a debugdiscovery b --debug --verbose
309 309 comparing with b
310 310 query 1; heads
311 311 searching for changes
312 312 taking quick initial sample
313 313 searching: 2 queries
314 314 query 2; still undecided: 1240, sample size is: 100
315 315 sampling from both directions
316 316 searching: 3 queries
317 317 query 3; still undecided: 1140, sample size is: 200
318 318 sampling from both directions
319 319 searching: 4 queries
320 320 query 4; still undecided: 940, sample size is: 200
321 321 sampling from both directions
322 322 searching: 5 queries
323 323 query 5; still undecided: 740, sample size is: 200
324 324 sampling from both directions
325 325 searching: 6 queries
326 326 query 6; still undecided: 540, sample size is: 200
327 327 sampling from both directions
328 328 searching: 7 queries
329 query 7; still undecided: 44, sample size is: 44
329 query 7; still undecided: 37, sample size is: 37
330 330 7 total queries
331 331 common heads: 3ee37d65064a
332 332
333 333 Test actual protocol when pulling one new head in addition to common heads
334 334
335 335 $ hg clone -U b c
336 336 $ hg -R c id -ir tip
337 337 513314ca8b3a
338 338 $ hg -R c up -qr default
339 339 $ touch c/f
340 340 $ hg -R c ci -Aqm "extra head"
341 341 $ hg -R c id -i
342 342 e64a39e7da8b
343 343
344 344 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
345 345 $ cat hg.pid >> $DAEMON_PIDS
346 346
347 347 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
348 348 comparing with http://localhost:$HGPORT/
349 349 searching for changes
350 350 e64a39e7da8b
351 351
352 352 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
353 353 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
354 354 "GET /?cmd=capabilities HTTP/1.1" 200 -
355 355 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db
356 356 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477
357 357 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
358 358 $ cat errors.log
359 359
360 360 $ cd ..
361 361
362 362
363 363 Issue 4438 - test coverage for 3ef893520a85 issues.
364 364
365 365 $ mkdir issue4438
366 366 $ cd issue4438
367 367 #if false
368 368 generate new bundles:
369 369 $ hg init r1
370 370 $ for i in `seq 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
371 371 $ hg clone -q r1 r2
372 372 $ for i in `seq 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
373 373 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
374 374 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
375 375 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
376 376 #else
377 377 use existing bundles:
378 378 $ hg clone -q $TESTDIR/bundles/issue4438-r1.hg r1
379 379 $ hg clone -q $TESTDIR/bundles/issue4438-r2.hg r2
380 380 #endif
381 381
382 382 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
383 383
384 384 $ hg -R r1 outgoing r2 -T'{rev} '
385 385 comparing with r2
386 386 searching for changes
387 387 101 102 103 104 105 106 107 108 109 110 (no-eol)
388 388
389 389 The case where all the 'initialsamplesize' samples already were common would
390 390 give 'all remote heads known locally' without checking the remaining heads -
391 391 fixed in 86c35b7ae300:
392 392
393 393 $ cat >> $TESTTMP/unrandomsample.py << EOF
394 394 > import random
395 395 > def sample(population, k):
396 396 > return sorted(population)[:k]
397 397 > random.sample = sample
398 398 > EOF
399 399
400 400 $ cat >> r1/.hg/hgrc << EOF
401 401 > [extensions]
402 402 > unrandomsample = $TESTTMP/unrandomsample.py
403 403 > EOF
404 404
405 405 $ hg -R r1 outgoing r2 -T'{rev} '
406 406 comparing with r2
407 407 searching for changes
408 408 101 102 103 104 105 106 107 108 109 110 (no-eol)
409 409 $ cd ..
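A side note on the unrandomsample extension above: the only nondeterminism in the whole algorithm is random.sample, so replacing it with a sorted prefix pins down every sampling step and therefore the exact query sequence the test expects. A minimal demonstration of the effect on _limitsample (standalone sketch, mirroring the stub):

    import random

    def sample(population, k):
        # deterministic stand-in: the k smallest elements
        return sorted(population)[:k]

    random.sample = sample

    def _limitsample(s, desiredlen):
        if len(s) > desiredlen:
            s = set(random.sample(s, desiredlen))
        return s

    print(_limitsample(set([9, 3, 7, 1, 5]), 3))  # always the three smallest: 1, 3, 5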