##// END OF EJS Templates
setdiscovery: limit the size of all sample (issue4411)...
Pierre-Yves David -
r23130:ced63239 3.2 stable
parent child Browse files
Show More
@@ -1,237 +1,241 b''
1 1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 """
9 9 Algorithm works in the following way. You have two repositories: local and
10 10 remote. They both contain a DAG of changelists.
11 11
12 12 The goal of the discovery protocol is to find a set of nodes *common*,
13 13 the set of nodes shared by local and remote.
14 14
15 15 One of the issues with the original protocol was latency, it could
16 16 potentially require lots of roundtrips to discover that the local repo was a
17 17 subset of remote (which is a very common case, you usually have few changes
18 18 compared to upstream, while upstream probably had lots of development).
19 19
20 20 The new protocol only requires one interface for the remote repo: `known()`,
21 21 which given a set of changelists tells you if they are present in the DAG.
22 22
23 23 The algorithm then works as follows:
24 24
25 25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 26 all nodes are in `unknown`.
27 27 - Take a sample from `unknown`, call `remote.known(sample)`
28 28 - For each node that remote knows, move it and all its ancestors to `common`
29 29 - For each node that remote doesn't know, move it and all its descendants
30 30 to `missing`
31 31 - Iterate until `unknown` is empty
32 32
33 33 There are a couple of optimizations: first, instead of starting with a random
34 34 sample of missing, start by sending all heads; in the case where the local
35 35 repo is a subset, you computed the answer in one round trip.
36 36
37 37 Then you can do something similar to the bisecting strategy used when
38 38 finding faulty changesets. Instead of random samples, you can try picking
39 39 nodes that will maximize the number of nodes that will be
40 40 classified with it (since all ancestors or descendants will be marked as well).
41 41 """
42 42
43 43 from node import nullid
44 44 from i18n import _
45 45 import random
46 46 import util, dagutil
47 47
48 48 def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
49 49 # if nodes is empty we scan the entire graph
50 50 if nodes:
51 51 heads = dag.headsetofconnecteds(nodes)
52 52 else:
53 53 heads = dag.heads()
54 54 dist = {}
55 55 visit = util.deque(heads)
56 56 seen = set()
57 57 factor = 1
58 58 while visit:
59 59 curr = visit.popleft()
60 60 if curr in seen:
61 61 continue
62 62 d = dist.setdefault(curr, 1)
63 63 if d > factor:
64 64 factor *= 2
65 65 if d == factor:
66 66 if curr not in always: # need this check for the early exit below
67 67 sample.add(curr)
68 68 if quicksamplesize and (len(sample) >= quicksamplesize):
69 69 return
70 70 seen.add(curr)
71 71 for p in dag.parents(curr):
72 72 if not nodes or p in nodes:
73 73 dist.setdefault(p, d + 1)
74 74 visit.append(p)
75 75
76 76 def _setupsample(dag, nodes, size):
77 77 if len(nodes) <= size:
78 78 return set(nodes), None, 0
79 79 always = dag.headsetofconnecteds(nodes)
80 80 desiredlen = size - len(always)
81 81 if desiredlen <= 0:
82 82 # This could be bad if there are very many heads, all unknown to the
83 83 # server. We're counting on long request support here.
84 84 return always, None, desiredlen
85 85 return always, set(), desiredlen
86 86
87 87 def _takequicksample(dag, nodes, size, initial):
88 88 always, sample, desiredlen = _setupsample(dag, nodes, size)
89 89 if sample is None:
90 90 return always
91 91 if initial:
92 92 fromset = None
93 93 else:
94 94 fromset = nodes
95 95 _updatesample(dag, fromset, sample, always, quicksamplesize=desiredlen)
96 96 sample.update(always)
97 97 return sample
98 98
99 99 def _takefullsample(dag, nodes, size):
100 100 always, sample, desiredlen = _setupsample(dag, nodes, size)
101 101 if sample is None:
102 102 return always
103 103 # update from heads
104 104 _updatesample(dag, nodes, sample, always)
105 105 # update from roots
106 106 _updatesample(dag.inverse(), nodes, sample, always)
107 107 assert sample
108 108 sample = _limitsample(sample, desiredlen)
109 109 if len(sample) < desiredlen:
110 110 more = desiredlen - len(sample)
111 111 sample.update(random.sample(list(nodes - sample - always), more))
112 112 sample.update(always)
113 113 return sample
114 114
115 115 def _limitsample(sample, desiredlen):
116 116 """return a random subset of sample of at most desiredlen items"""
117 117 if len(sample) > desiredlen:
118 118 sample = set(random.sample(sample, desiredlen))
119 119 return sample
120 120
121 121 def findcommonheads(ui, local, remote,
122 122 initialsamplesize=100,
123 123 fullsamplesize=200,
124 124 abortwhenunrelated=True):
125 125 '''Return a tuple (common, anyincoming, remoteheads) used to identify
126 126 missing nodes from or in remote.
127 127 '''
128 128 roundtrips = 0
129 129 cl = local.changelog
130 130 dag = dagutil.revlogdag(cl)
131 131
132 132 # early exit if we know all the specified remote heads already
133 133 ui.debug("query 1; heads\n")
134 134 roundtrips += 1
135 135 ownheads = dag.heads()
136 136 sample = _limitsample(ownheads, initialsamplesize)
137 137 if remote.local():
138 138 # stopgap until we have a proper localpeer that supports batch()
139 139 srvheadhashes = remote.heads()
140 140 yesno = remote.known(dag.externalizeall(sample))
141 141 elif remote.capable('batch'):
142 142 batch = remote.batch()
143 143 srvheadhashesref = batch.heads()
144 144 yesnoref = batch.known(dag.externalizeall(sample))
145 145 batch.submit()
146 146 srvheadhashes = srvheadhashesref.value
147 147 yesno = yesnoref.value
148 148 else:
149 149 # compatibility with pre-batch, but post-known remotes during 1.9
150 150 # development
151 151 srvheadhashes = remote.heads()
152 152 sample = []
153 153
154 154 if cl.tip() == nullid:
155 155 if srvheadhashes != [nullid]:
156 156 return [nullid], True, srvheadhashes
157 157 return [nullid], False, []
158 158
159 159 # start actual discovery (we note this before the next "if" for
160 160 # compatibility reasons)
161 161 ui.status(_("searching for changes\n"))
162 162
163 163 srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
164 164 if len(srvheads) == len(srvheadhashes):
165 165 ui.debug("all remote heads known locally\n")
166 166 return (srvheadhashes, False, srvheadhashes,)
167 167
168 168 if sample and util.all(yesno):
169 169 ui.note(_("all local heads known remotely\n"))
170 170 ownheadhashes = dag.externalizeall(ownheads)
171 171 return (ownheadhashes, True, srvheadhashes,)
172 172
173 173 # full blown discovery
174 174
175 175 # own nodes where I don't know if remote knows them
176 176 undecided = dag.nodeset()
177 177 # own nodes I know we both know
178 178 common = set()
179 179 # own nodes I know remote lacks
180 180 missing = set()
181 181
182 182 # treat remote heads (and maybe own heads) as a first implicit sample
183 183 # response
184 184 common.update(dag.ancestorset(srvheads))
185 185 undecided.difference_update(common)
186 186
187 187 full = False
188 188 while undecided:
189 189
190 190 if sample:
191 191 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
192 192 common.update(dag.ancestorset(commoninsample, common))
193 193
194 194 missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
195 195 missing.update(dag.descendantset(missinginsample, missing))
196 196
197 197 undecided.difference_update(missing)
198 198 undecided.difference_update(common)
199 199
200 200 if not undecided:
201 201 break
202 202
203 203 if full:
204 204 ui.note(_("sampling from both directions\n"))
205 205 sample = _takefullsample(dag, undecided, size=fullsamplesize)
206 targetsize = fullsamplesize
206 207 elif common:
207 208 # use cheapish initial sample
208 209 ui.debug("taking initial sample\n")
209 210 sample = _takefullsample(dag, undecided, size=fullsamplesize)
211 targetsize = fullsamplesize
210 212 else:
211 213 # use even cheaper initial sample
212 214 ui.debug("taking quick initial sample\n")
213 215 sample = _takequicksample(dag, undecided, size=initialsamplesize,
214 216 initial=True)
217 targetsize = initialsamplesize
218 sample = _limitsample(sample, targetsize)
215 219
216 220 roundtrips += 1
217 221 ui.progress(_('searching'), roundtrips, unit=_('queries'))
218 222 ui.debug("query %i; still undecided: %i, sample size is: %i\n"
219 223 % (roundtrips, len(undecided), len(sample)))
220 224 # indices between sample and externalized version must match
221 225 sample = list(sample)
222 226 yesno = remote.known(dag.externalizeall(sample))
223 227 full = True
224 228
225 229 result = dag.headsetofconnecteds(common)
226 230 ui.progress(_('searching'), None)
227 231 ui.debug("%d total queries\n" % roundtrips)
228 232
229 233 if not result and srvheadhashes != [nullid]:
230 234 if abortwhenunrelated:
231 235 raise util.Abort(_("repository is unrelated"))
232 236 else:
233 237 ui.warn(_("warning: repository is unrelated\n"))
234 238 return (set([nullid]), True, srvheadhashes,)
235 239
236 240 anyincoming = (srvheadhashes != [nullid])
237 241 return dag.externalizeall(result), anyincoming, srvheadhashes
@@ -1,353 +1,359 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug
18 18 > echo
19 19 > echo "% -- b -> a tree"
20 20 > hg -R b debugdiscovery a --verbose --old
21 21 > echo
22 22 > echo "% -- b -> a set"
23 23 > hg -R b debugdiscovery a --verbose --debug
24 24 > cd ..
25 25 > }
26 26
27 27
28 28 Small superset:
29 29
30 30 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
31 31 > +2:f +1:a1:b1
32 32 > <f +4 :a2
33 33 > +5 :b2
34 34 > <f +3 :b3'
35 35
36 36 % -- a -> b tree
37 37 comparing with b
38 38 searching for changes
39 39 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
40 40 common heads: 01241442b3c2 b5714e113bc0
41 41 local is subset
42 42
43 43 % -- a -> b set
44 44 comparing with b
45 45 query 1; heads
46 46 searching for changes
47 47 all local heads known remotely
48 48 common heads: 01241442b3c2 b5714e113bc0
49 49 local is subset
50 50
51 51 % -- b -> a tree
52 52 comparing with a
53 53 searching for changes
54 54 unpruned common: 01241442b3c2 b5714e113bc0
55 55 common heads: 01241442b3c2 b5714e113bc0
56 56 remote is subset
57 57
58 58 % -- b -> a set
59 59 comparing with a
60 60 query 1; heads
61 61 searching for changes
62 62 all remote heads known locally
63 63 common heads: 01241442b3c2 b5714e113bc0
64 64 remote is subset
65 65
66 66
67 67 Many new:
68 68
69 69 $ testdesc '-ra1 -ra2' '-rb' '
70 70 > +2:f +3:a1 +3:b
71 71 > <f +30 :a2'
72 72
73 73 % -- a -> b tree
74 74 comparing with b
75 75 searching for changes
76 76 unpruned common: bebd167eb94d
77 77 common heads: bebd167eb94d
78 78
79 79 % -- a -> b set
80 80 comparing with b
81 81 query 1; heads
82 82 searching for changes
83 83 taking initial sample
84 84 searching: 2 queries
85 85 query 2; still undecided: 29, sample size is: 29
86 86 2 total queries
87 87 common heads: bebd167eb94d
88 88
89 89 % -- b -> a tree
90 90 comparing with a
91 91 searching for changes
92 92 unpruned common: 66f7d451a68b bebd167eb94d
93 93 common heads: bebd167eb94d
94 94
95 95 % -- b -> a set
96 96 comparing with a
97 97 query 1; heads
98 98 searching for changes
99 99 taking initial sample
100 100 searching: 2 queries
101 101 query 2; still undecided: 2, sample size is: 2
102 102 2 total queries
103 103 common heads: bebd167eb94d
104 104
105 105
106 106 Both sides many new with stub:
107 107
108 108 $ testdesc '-ra1 -ra2' '-rb' '
109 109 > +2:f +2:a1 +30 :b
110 110 > <f +30 :a2'
111 111
112 112 % -- a -> b tree
113 113 comparing with b
114 114 searching for changes
115 115 unpruned common: 2dc09a01254d
116 116 common heads: 2dc09a01254d
117 117
118 118 % -- a -> b set
119 119 comparing with b
120 120 query 1; heads
121 121 searching for changes
122 122 taking initial sample
123 123 searching: 2 queries
124 124 query 2; still undecided: 29, sample size is: 29
125 125 2 total queries
126 126 common heads: 2dc09a01254d
127 127
128 128 % -- b -> a tree
129 129 comparing with a
130 130 searching for changes
131 131 unpruned common: 2dc09a01254d 66f7d451a68b
132 132 common heads: 2dc09a01254d
133 133
134 134 % -- b -> a set
135 135 comparing with a
136 136 query 1; heads
137 137 searching for changes
138 138 taking initial sample
139 139 searching: 2 queries
140 140 query 2; still undecided: 29, sample size is: 29
141 141 2 total queries
142 142 common heads: 2dc09a01254d
143 143
144 144
145 145 Both many new:
146 146
147 147 $ testdesc '-ra' '-rb' '
148 148 > +2:f +30 :b
149 149 > <f +30 :a'
150 150
151 151 % -- a -> b tree
152 152 comparing with b
153 153 searching for changes
154 154 unpruned common: 66f7d451a68b
155 155 common heads: 66f7d451a68b
156 156
157 157 % -- a -> b set
158 158 comparing with b
159 159 query 1; heads
160 160 searching for changes
161 161 taking quick initial sample
162 162 searching: 2 queries
163 163 query 2; still undecided: 31, sample size is: 31
164 164 2 total queries
165 165 common heads: 66f7d451a68b
166 166
167 167 % -- b -> a tree
168 168 comparing with a
169 169 searching for changes
170 170 unpruned common: 66f7d451a68b
171 171 common heads: 66f7d451a68b
172 172
173 173 % -- b -> a set
174 174 comparing with a
175 175 query 1; heads
176 176 searching for changes
177 177 taking quick initial sample
178 178 searching: 2 queries
179 179 query 2; still undecided: 31, sample size is: 31
180 180 2 total queries
181 181 common heads: 66f7d451a68b
182 182
183 183
184 184 Both many new skewed:
185 185
186 186 $ testdesc '-ra' '-rb' '
187 187 > +2:f +30 :b
188 188 > <f +50 :a'
189 189
190 190 % -- a -> b tree
191 191 comparing with b
192 192 searching for changes
193 193 unpruned common: 66f7d451a68b
194 194 common heads: 66f7d451a68b
195 195
196 196 % -- a -> b set
197 197 comparing with b
198 198 query 1; heads
199 199 searching for changes
200 200 taking quick initial sample
201 201 searching: 2 queries
202 202 query 2; still undecided: 51, sample size is: 51
203 203 2 total queries
204 204 common heads: 66f7d451a68b
205 205
206 206 % -- b -> a tree
207 207 comparing with a
208 208 searching for changes
209 209 unpruned common: 66f7d451a68b
210 210 common heads: 66f7d451a68b
211 211
212 212 % -- b -> a set
213 213 comparing with a
214 214 query 1; heads
215 215 searching for changes
216 216 taking quick initial sample
217 217 searching: 2 queries
218 218 query 2; still undecided: 31, sample size is: 31
219 219 2 total queries
220 220 common heads: 66f7d451a68b
221 221
222 222
223 223 Both many new on top of long history:
224 224
225 225 $ testdesc '-ra' '-rb' '
226 226 > +1000:f +30 :b
227 227 > <f +50 :a'
228 228
229 229 % -- a -> b tree
230 230 comparing with b
231 231 searching for changes
232 232 unpruned common: 7ead0cba2838
233 233 common heads: 7ead0cba2838
234 234
235 235 % -- a -> b set
236 236 comparing with b
237 237 query 1; heads
238 238 searching for changes
239 239 taking quick initial sample
240 240 searching: 2 queries
241 241 query 2; still undecided: 1049, sample size is: 11
242 242 sampling from both directions
243 243 searching: 3 queries
244 244 query 3; still undecided: 31, sample size is: 31
245 245 3 total queries
246 246 common heads: 7ead0cba2838
247 247
248 248 % -- b -> a tree
249 249 comparing with a
250 250 searching for changes
251 251 unpruned common: 7ead0cba2838
252 252 common heads: 7ead0cba2838
253 253
254 254 % -- b -> a set
255 255 comparing with a
256 256 query 1; heads
257 257 searching for changes
258 258 taking quick initial sample
259 259 searching: 2 queries
260 260 query 2; still undecided: 1029, sample size is: 11
261 261 sampling from both directions
262 262 searching: 3 queries
263 263 query 3; still undecided: 15, sample size is: 15
264 264 3 total queries
265 265 common heads: 7ead0cba2838
266 266
267 267
268 268 One with >200 heads, which used to use up all of the sample:
269 269
270 270 $ hg init manyheads
271 271 $ cd manyheads
272 272 $ echo "+300:r @a" >dagdesc
273 273 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
274 274 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
275 275 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
276 276 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
277 277 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
278 278 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
279 279 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
280 280 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
281 281 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
282 282 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
283 283 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
284 284 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
285 285 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
286 286 $ echo "@b *r+3" >>dagdesc # one more head
287 287 $ hg debugbuilddag <dagdesc
288 288 reading DAG from stdin
289 289
290 290 $ hg heads -t --template . | wc -c
291 291 \s*261 (re)
292 292
293 293 $ hg clone -b a . a
294 294 adding changesets
295 295 adding manifests
296 296 adding file changes
297 297 added 1340 changesets with 0 changes to 0 files (+259 heads)
298 298 updating to branch a
299 299 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
300 300 $ hg clone -b b . b
301 301 adding changesets
302 302 adding manifests
303 303 adding file changes
304 304 added 304 changesets with 0 changes to 0 files
305 305 updating to branch b
306 306 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
307 307
308 308 $ hg -R a debugdiscovery b --debug --verbose
309 309 comparing with b
310 310 query 1; heads
311 311 searching for changes
312 312 taking quick initial sample
313 313 searching: 2 queries
314 query 2; still undecided: 1240, sample size is: 260
314 query 2; still undecided: 1240, sample size is: 100
315 315 sampling from both directions
316 316 searching: 3 queries
317 query 3; still undecided: 980, sample size is: 260
317 query 3; still undecided: 1140, sample size is: 200
318 318 sampling from both directions
319 319 searching: 4 queries
320 query 4; still undecided: 720, sample size is: 260
320 query 4; still undecided: 940, sample size is: 200
321 321 sampling from both directions
322 322 searching: 5 queries
323 query 5; still undecided: 460, sample size is: 200
324 5 total queries
323 query 5; still undecided: 740, sample size is: 200
324 sampling from both directions
325 searching: 6 queries
326 query 6; still undecided: 540, sample size is: 200
327 sampling from both directions
328 searching: 7 queries
329 query 7; still undecided: 44, sample size is: 44
330 7 total queries
325 331 common heads: 3ee37d65064a
326 332
327 333 Test actual protocol when pulling one new head in addition to common heads
328 334
329 335 $ hg clone -U b c
330 336 $ hg -R c id -ir tip
331 337 513314ca8b3a
332 338 $ hg -R c up -qr default
333 339 $ touch c/f
334 340 $ hg -R c ci -Aqm "extra head"
335 341 $ hg -R c id -i
336 342 e64a39e7da8b
337 343
338 344 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
339 345 $ cat hg.pid >> $DAEMON_PIDS
340 346
341 347 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
342 348 comparing with http://localhost:$HGPORT/
343 349 searching for changes
344 350 e64a39e7da8b
345 351
346 352 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
347 353 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
348 354 "GET /?cmd=capabilities HTTP/1.1" 200 -
349 355 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db
350 356 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477
351 357 $ cat errors.log
352 358
353 359 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now