##// END OF EJS Templates
merge with stable
Yuya Nishihara -
r40675:43752021 merge default
parent child Browse files
Show More
@@ -1,986 +1,987
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import itertools
44 import itertools
45 import json
45 import json
46 import operator
46 import operator
47 import re
47 import re
48
48
49 from mercurial.node import bin, nullid
49 from mercurial.node import bin, nullid
50 from mercurial.i18n import _
50 from mercurial.i18n import _
51 from mercurial import (
51 from mercurial import (
52 cmdutil,
52 cmdutil,
53 context,
53 context,
54 encoding,
54 encoding,
55 error,
55 error,
56 httpconnection as httpconnectionmod,
56 httpconnection as httpconnectionmod,
57 mdiff,
57 mdiff,
58 obsutil,
58 obsutil,
59 parser,
59 parser,
60 patch,
60 patch,
61 registrar,
61 registrar,
62 scmutil,
62 scmutil,
63 smartset,
63 smartset,
64 tags,
64 tags,
65 templateutil,
65 templateutil,
66 url as urlmod,
66 url as urlmod,
67 util,
67 util,
68 )
68 )
69 from mercurial.utils import (
69 from mercurial.utils import (
70 procutil,
70 procutil,
71 stringutil,
71 stringutil,
72 )
72 )
73
73
74 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
74 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
75 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
75 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
76 # be specifying the version(s) of Mercurial they are tested with, or
76 # be specifying the version(s) of Mercurial they are tested with, or
77 # leave the attribute unspecified.
77 # leave the attribute unspecified.
# Extension metadata: this file ships with Mercurial core.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem(b'phabricator', b'batchsize',
           default=12,
)
configitem(b'phabricator', b'callsign',
           default=None,
)
configitem(b'phabricator', b'curlcmd',
           default=None,
)
# developer config: phabricator.repophid
configitem(b'phabricator', b'repophid',
           default=None,
)
configitem(b'phabricator', b'url',
           default=None,
)
configitem(b'phabsend', b'confirm',
           default=False,
)

# Color/effect labels used when writing phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag appended to every command registered through vcrcommand().
_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
     )),
]
123
123
def vcrcommand(name, flags, spec):
    """Register ``name`` as a command, adding the VCR test flags.

    Works like ``command(name, flags, spec)`` but wraps the function so
    that when ``--test-vcr`` is given, HTTP traffic is recorded to (or
    replayed from) the named cassette file via the third-party ``vcr``
    package.
    """
    fullflags = flags + _VCR_FLAGS
    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = kwargs.pop(r'test_vcr', None)
            if not cassette:
                # No cassette requested: plain invocation.
                return fn(*args, **kwargs)
            # vcr is a third-party package; disable demandimport while
            # pulling it in so its imports resolve normally.
            import hgdemandimport
            with hgdemandimport.deactivated():
                import vcr as vcrmod
                import vcr.stubs as stubs
                vcr = vcrmod.VCR(
                    serializer=r'json',
                    custom_patches=[
                        (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
                        (urlmod, 'httpsconnection',
                         stubs.VCRHTTPSConnection),
                    ])
                with vcr.use_cassette(cassette):
                    return fn(*args, **kwargs)
        # Preserve the wrapped function's metadata for help output.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(name, fullflags, spec)(inner)
    return decorate
147
148
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()
    def flatten(prefix, value):
        # Lists are keyed by index, dicts by key; any other type is a leaf.
        walker = {list: enumerate, dict: lambda d: d.items()}.get(type(value))
        if walker is None:
            flat[prefix] = value
            return
        for key, item in walker(value):
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                # Top level: the bare key becomes the prefix.
                flatten(key, item)
    flatten(b'', params)
    return util.urlreq.urlencode(flat)
168
169
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = repo.ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    token = None
    res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
    if res:
        group, auth = res
        repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token
195
196
def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
    # Copy before injecting the token so the caller's dict is untouched.
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User configured an external curl command; pipe the form data in.
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default: builtin HTTP library, honoring [auth] configuration.
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(url, data=data)
        body = urlopener.open(request).read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)
    parsed = json.loads(body)
    if parsed.get(r'error_code'):
        raise error.Abort(_(b'Conduit Error (%s): %s')
                          % (parsed[r'error_code'], parsed[r'error_info']))
    return parsed[r'result']
222
223
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    params = json.loads(ui.fin.read())
    result = callconduit(repo, name, params)
    # Pretty-print deterministically so test output is stable.
    rendered = json.dumps(result, sort_keys=True, indent=2,
                          separators=(b',', b': '))
    ui.write(b'%s\n' % rendered)
234
235
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(repo, b'diffusion.repository.search',
                        {b'constraints': {b'callsigns': [callsign]}})
    if not query[r'data']:
        return None
    repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
    # Cache the answer in config so later calls skip the API roundtrip.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
251
252
252 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
253 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
253 _differentialrevisiondescre = re.compile(
254 _differentialrevisiondescre = re.compile(
254 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
255 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
255
256
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    # Validates phabricator.url / [auth] config early; the values themselves
    # are unused here (callconduit re-reads them).
    url, token = readurltoken(repo)
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {} # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: a tag match alone still needs to be
                        # confirmed against Phabricator's diff history below.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: an explicit "Differential Revision:" line in the
            # commit message is trusted even without precursor overlap.
            toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        # Extract the local node recorded in a diff's "hg:meta" property;
        # None when the diff has no usable node metadata.
        getnode = lambda d: bin(encoding.unitolocal(
            getdiffmeta(d).get(r'node', b''))) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[r'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Tagging nullid removes the stale local tag.
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[r'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
334
335
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
                          None, opts=diffopts)
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
342
343
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    # (full context so the patch applies losslessly).
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo, b'differential.createrawdiff', params)
    if diff:
        return diff
    raise error.Abort(_(b'cannot create diff for %s') % ctx)
355
356
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # First property: "hg:meta" records the changeset identity.
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', {
        b'diff_id': diff[r'id'],
        b'name': b'hg:meta',
        b'data': json.dumps(hgmeta),
    })

    # Second property: "local:commits" records per-commit author info.
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': ctx.date()[0],
        },
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', {
        b'diff_id': diff[r'id'],
        b'name': b'local:commits',
        b'data': json.dumps(localcommits),
    })
382
383
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair, where ``revision`` is the Conduit
    response of ``differential.revision.edit`` and ``diff`` is the (new or
    reused) Differential Diff.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of old and new nodes to decide whether
        # the patch content actually changed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[r'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%s' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[r'fields'].items():
        # Only these fields are forwarded into the revision transaction.
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
445
446
def userphids(repo, names):
    """convert user names to PHIDs"""
    result = callconduit(repo, b'user.search',
                         {b'constraints': {b'usernames': names}})
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[r'data']
    resolved = set(entry[r'fields'][r'username'] for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(unresolved)))
    return [entry[r'phid'] for entry in data]
459
460
@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'))
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # Confirmation may come from either config or the command line flag.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        if not _confirmbeforesend(repo, revs, oldmap):
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[r'object'][r'id'])
            action = b'updated' if revid else b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(b'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for rev, drevid in zip(revs, drevids):
                old = unfi[rev]
                drev = [d for d in drevs if int(d[r'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                newdesc = encoding.unitolocal(newdesc)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
602
603
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([
    (r'user', b'User'),
    (r'date', b'Date'),
    (r'node', b'Node ID'),
    (r'parent', b'Parent '),
])
607
608
def _confirmbeforesend(repo, revs, oldmap):
    """print a one-line summary per changeset and prompt before sending

    Returns True when the user accepts, False when they decline.
    """
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Show the existing Differential Revision number, or NEW for
        # changesets without a known association.
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(desc, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice returns 0 for "Yes" (send) and 1 for "No" (cancel).
    return not ui.promptchoice(_(b'Send the above changes to %s (yn)?'
                                 b'$$ &Yes $$ &No') % url)
630
631
631 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
632 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
632 b'abandoned'}
633 b'abandoned'}
633
634
634 def _getstatusname(drev):
635 def _getstatusname(drev):
635 """get normalized status name from a Differential Revision"""
636 """get normalized status name from a Differential Revision"""
636 return drev[r'statusName'].replace(b' ', b'').lower()
637 return drev[r'statusName'].replace(b' ', b'').lower()
637
638
638 # Small language to specify differential revisions. Support symbols: (), :X,
639 # Small language to specify differential revisions. Support symbols: (), :X,
639 # +, and -.
640 # +, and -.
640
641
641 _elements = {
642 _elements = {
642 # token-type: binding-strength, primary, prefix, infix, suffix
643 # token-type: binding-strength, primary, prefix, infix, suffix
643 b'(': (12, None, (b'group', 1, b')'), None, None),
644 b'(': (12, None, (b'group', 1, b')'), None, None),
644 b':': (8, None, (b'ancestors', 8), None, None),
645 b':': (8, None, (b'ancestors', 8), None, None),
645 b'&': (5, None, None, (b'and_', 5), None),
646 b'&': (5, None, None, (b'and_', 5), None),
646 b'+': (4, None, None, (b'add', 4), None),
647 b'+': (4, None, None, (b'add', 4), None),
647 b'-': (4, None, None, (b'sub', 4), None),
648 b'-': (4, None, None, (b'sub', 4), None),
648 b')': (0, None, None, None, None),
649 b')': (0, None, None, None, None),
649 b'symbol': (0, b'symbol', None, None, None),
650 b'symbol': (0, b'symbol', None, None, None),
650 b'end': (0, None, None, None, None),
651 b'end': (0, None, None, None, None),
651 }
652 }
652
653
653 def _tokenize(text):
654 def _tokenize(text):
654 view = memoryview(text) # zero-copy slice
655 view = memoryview(text) # zero-copy slice
655 special = b'():+-& '
656 special = b'():+-& '
656 pos = 0
657 pos = 0
657 length = len(text)
658 length = len(text)
658 while pos < length:
659 while pos < length:
659 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
660 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
660 view[pos:]))
661 view[pos:]))
661 if symbol:
662 if symbol:
662 yield (b'symbol', symbol, pos)
663 yield (b'symbol', symbol, pos)
663 pos += len(symbol)
664 pos += len(symbol)
664 else: # special char, ignore space
665 else: # special char, ignore space
665 if text[pos] != b' ':
666 if text[pos] != b' ':
666 yield (text[pos], None, pos)
667 yield (text[pos], None, pos)
667 pos += 1
668 pos += 1
668 yield (b'end', None, pos)
669 yield (b'end', None, pos)
669
670
def _parse(text):
    """parse a DREVSPEC bytes string into a tree

    Raises ParseError when the parser stops before consuming all input.
    """
    drevparser = parser.parser(_elements)
    tree, pos = drevparser.parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
675
676
676 def _parsedrev(symbol):
677 def _parsedrev(symbol):
677 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
678 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
678 if symbol.startswith(b'D') and symbol[1:].isdigit():
679 if symbol.startswith(b'D') and symbol[1:].isdigit():
679 return int(symbol[1:])
680 return int(symbol[1:])
680 if symbol.isdigit():
681 if symbol.isdigit():
681 return int(symbol)
682 return int(symbol)
682
683
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        # A bare number / D-number contributes one single id (status
        # names parse to None and contribute nothing).
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif kind == b'ancestors':
        # ids under ':' count both as singles and as ancestor roots.
        sub, subanc = _prefetchdrevs(tree[1])
        singles.update(sub)
        ancestors.update(sub)
        ancestors.update(subanc)
    else:
        # Operators (and_/add/sub/group): merge results from all operands.
        for subtree in tree[1:]:
            sub, subanc = _prefetchdrevs(subtree)
            singles.update(sub)
            ancestors.update(subanc)
    return singles, ancestors
703
704
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo, b'differential.query', params)
        # Remember every fetched drev under both its id and its phid so
        # later lookups by either key are served from the cache.
        for drev in drevs:
            prefetched[drev[r'phid']] = drev
            prefetched[int(drev[r'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        stack = []
        pending = [{r'ids': [i]} for i in topdrevids]
        while pending:
            params = pending.pop()
            drev = fetch(params)
            if drev[r'id'] in visited:
                continue
            visited.add(drev[r'id'])
            stack.append(int(drev[r'id']))
            # Follow "depends-on" edges towards the bottom of the stack.
            auxiliary = drev.get(r'auxiliary', {})
            for phid in auxiliary.get(r'phabricator:depends-on', []):
                pending.append({b'phids': [phid]})
        stack.reverse()
        return smartset.baseset(stack)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({r'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter the already-prefetched candidates.
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
821
822
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[r'title'], drev[r'summary'].rstrip()]
    testplan = drev[r'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[r'uri'])
    # Empty sections (e.g. a blank summary) are dropped entirely.
    return b'\n\n'.join(p for p in parts if p)
835
836
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(r'properties') or {}
    meta = props.get(r'hg:meta')
    # Fall back to arc's "local:commits" when phabsend's "hg:meta" is absent.
    if not meta and props.get(r'local:commits'):
        commit = sorted(props[r'local:commits'].values())[0]
        meta = {
            r'date': r'%d 0' % commit[r'time'],
            r'node': commit[r'rev'],
            r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
        }
        parents = commit.get(r'parents', ())
        if parents:
            meta[r'parent'] = parents[0]
    return meta or {}
885
886
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[r'id'])

        # The latest diff attached to the revision is the one we export.
        diffid = max(int(v) for v in drev[r'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        headerlines = [b'# HG changeset patch']
        meta = getdiffmeta(diffs[str(diffid)])
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[k], meta[k]))
        header = b''.join(l + b'\n' for l in headerlines)

        content = b'%s%s\n%s' % (header, desc, body)
        write(encoding.unitolocal(content))
916
917
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'))
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get(b'stack'):
        # --stack is sugar for wrapping the spec in the ':' operator.
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
943
944
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'))
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    # At most one status-changing flag may be given at a time.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': b'true'} for f in flags]

    drevs = querydrev(repo, spec)
    lastindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # The comment, if any, is attached only to the last revision.
        if i == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[r'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)
972
973
# Registrar used to declare the template keywords this extension provides.
templatekeyword = registrar.templatekeyword()
974
975
@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(b'url'),
            # bytes have no .format() on Python 3; %-formatting works for
            # bytes on both Python 2 and (3.5+) Python 3.
            b'id': b'D%s' % m.group(b'id'),
        })
@@ -1,1229 +1,1229
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import shutil
14 import shutil
15 import stat
15 import stat
16
16
17 from .i18n import _
17 from .i18n import _
18 from .node import (
18 from .node import (
19 nullid,
19 nullid,
20 )
20 )
21
21
22 from . import (
22 from . import (
23 bookmarks,
23 bookmarks,
24 bundlerepo,
24 bundlerepo,
25 cacheutil,
25 cacheutil,
26 cmdutil,
26 cmdutil,
27 destutil,
27 destutil,
28 discovery,
28 discovery,
29 error,
29 error,
30 exchange,
30 exchange,
31 extensions,
31 extensions,
32 httppeer,
32 httppeer,
33 localrepo,
33 localrepo,
34 lock,
34 lock,
35 logcmdutil,
35 logcmdutil,
36 logexchange,
36 logexchange,
37 merge as mergemod,
37 merge as mergemod,
38 narrowspec,
38 narrowspec,
39 node,
39 node,
40 phases,
40 phases,
41 scmutil,
41 scmutil,
42 sshpeer,
42 sshpeer,
43 statichttprepo,
43 statichttprepo,
44 ui as uimod,
44 ui as uimod,
45 unionrepo,
45 unionrepo,
46 url,
46 url,
47 util,
47 util,
48 verify as verifymod,
48 verify as verifymod,
49 vfs as vfsmod,
49 vfs as vfsmod,
50 )
50 )
51
51
# convenience alias so callers can release locks via this module
release = lock.release

# shared features: names that may appear in a shared repo's 'shared' file
sharedbookmarks = 'bookmarks'
56
56
def _local(path):
    """Return the module handling a local path: bundlerepo when the
    path is a plain file (a bundle), localrepo otherwise."""
    localpath = util.expandpath(util.urllocalpath(path))
    if os.path.isfile(localpath):
        return bundlerepo
    return localrepo
60
60
def addbranchrevs(lrepo, other, branches, revs):
    """Resolve branch names requested via ``url#branch`` against a peer.

    ``branches`` is the ``(hashbranch, branches)`` pair as returned by
    parseurl(); ``revs`` is the caller's optional list of revisions.
    Returns ``(revs, checkout)`` where ``revs`` is the possibly extended
    revision list and ``checkout`` is the revision to update to (may be
    None).
    """
    peer = other.peer() # a courtesy to callers using a localrepo for other
    hashbranch, branches = branches
    if not hashbranch and not branches:
        # no branch requested: pass the caller's revs straight through
        x = revs or None
        if revs:
            y = revs[0]
        else:
            y = None
        return x, y
    if revs:
        revs = list(revs)
    else:
        revs = []

    if not peer.capable('branchmap'):
        # old server without branchmap support: the fragment can only be
        # treated as a plain revision
        if branches:
            raise error.Abort(_("remote branch lookup not supported"))
        revs.append(hashbranch)
        return revs, revs[0]

    with peer.commandexecutor() as e:
        branchmap = e.callcommand('branchmap', {}).result()

    def primary(branch):
        # append the heads of ``branch`` to revs; return False when the
        # name is not a known branch
        if branch == '.':
            if not lrepo:
                raise error.Abort(_("dirstate branch not accessible"))
            branch = lrepo.dirstate.branch()
        if branch in branchmap:
            revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
            return True
        else:
            return False

    for branch in branches:
        if not primary(branch):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        # the fragment may name either a branch or a bare revision; fall
        # back to treating it as a revision when it is not a branch
        if not primary(hashbranch):
            revs.append(hashbranch)
    return revs, revs[0]
103
103
def parseurl(path, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''
    u = util.url(path)
    branch = None
    if u.fragment:
        # a '#fragment' names the branch to operate on; strip it from
        # the URL proper
        branch, u.fragment = u.fragment, None
    return bytes(u), (branch, branches or [])
113
113
# map URL scheme to the module (or callable) implementing it; consumed
# by _peerlookup() below
schemes = {
    'bundle': bundlerepo,
    'union': unionrepo,
    'file': _local,
    'http': httppeer,
    'https': httppeer,
    'ssh': sshpeer,
    'static-http': statichttprepo,
}
123
123
def _peerlookup(path):
    """Return the scheme handler for ``path``.

    The handler is either the entry from ``schemes`` itself or, when
    that entry is callable (e.g. ``_local``), the result of calling it
    with ``path``.  Paths with no scheme fall back to 'file'.
    """
    u = util.url(path)
    scheme = u.scheme or 'file'
    thing = schemes.get(scheme) or schemes['file']
    try:
        return thing(path)
    except TypeError:
        # we can't test callable(thing) because 'thing' can be an unloaded
        # module that implements __call__
        if not util.safehasattr(thing, 'instance'):
            raise
        return thing
136
136
def islocal(repo):
    """return true if repo (or path pointing to repo) is local"""
    if not isinstance(repo, bytes):
        # already a repo/peer object: ask it directly
        return repo.local()
    try:
        return _peerlookup(repo).islocal(repo)
    except AttributeError:
        # the scheme handler has no notion of locality
        return False
145
145
def openpath(ui, path):
    '''open path with open if local, url.open if remote'''
    pathurl = util.url(path, parsequery=False, parsefragment=False)
    if pathurl.islocal():
        # local file: plain binary open
        return util.posixfile(pathurl.localpath(), 'rb')
    return url.open(ui, path)
153
153
# a list of (ui, repo) functions called for wire peer initialization
# (run by _peerorrepo for non-local peers)
wirepeersetupfuncs = []
156
156
def _peerorrepo(ui, path, create=False, presetupfuncs=None,
                intents=None, createopts=None):
    """return a repository object for the specified path

    Instantiates the repo/peer via the scheme handler, then runs
    ``presetupfuncs``, every loaded extension's ``reposetup`` hook and,
    for non-local objects, the registered wire peer setup functions.
    """
    obj = _peerlookup(path).instance(ui, path, create, intents=intents,
                                     createopts=createopts)
    # prefer the instantiated object's ui when it has one
    ui = getattr(obj, "ui", ui)
    if ui.configbool('devel', 'debug.extensions'):
        log = lambda msg, *values: ui.debug('debug.extensions: ',
            msg % values, label='debug.extensions')
    else:
        log = lambda *a, **kw: None
    for f in presetupfuncs or []:
        f(ui, obj)
    log('- executing reposetup hooks\n')
    with util.timedcm('all reposetup') as allreposetupstats:
        for name, module in extensions.extensions(ui):
            log(' - running reposetup for %s\n' % (name,))
            hook = getattr(module, 'reposetup', None)
            if hook:
                with util.timedcm('reposetup %r', name) as stats:
                    hook(ui, obj)
                log(' > reposetup for %s took %s\n', name, stats)
    log('> all reposetup took %s\n', allreposetupstats)
    if not obj.local():
        # wire peers get their own setup pass
        for f in wirepeersetupfuncs:
            f(ui, obj)
    return obj
184
184
def repository(ui, path='', create=False, presetupfuncs=None, intents=None,
               createopts=None):
    """return a repository object for the specified path"""
    obj = _peerorrepo(ui, path, create, presetupfuncs=presetupfuncs,
                      intents=intents, createopts=createopts)
    localinstance = obj.local()
    if not localinstance:
        raise error.Abort(_("repository '%s' is not local") %
                          (path or obj.url()))
    # hide obsolete/secret-filtered revisions from callers
    return localinstance.filtered('visible')
195
195
def peer(uiorrepo, opts, path, create=False, intents=None, createopts=None):
    '''return a repository peer for the specified path'''
    repoui = remoteui(uiorrepo, opts)
    obj = _peerorrepo(repoui, path, create, intents=intents,
                      createopts=createopts)
    return obj.peer()
201
201
def defaultdest(source):
    '''return default destination of clone if none is given

    >>> defaultdest(b'foo')
    'foo'
    >>> defaultdest(b'/foo/bar')
    'bar'
    >>> defaultdest(b'/')
    ''
    >>> defaultdest(b'')
    ''
    >>> defaultdest(b'http://example.org/')
    ''
    >>> defaultdest(b'http://example.org/foo/')
    'foo'
    '''
    path = util.url(source).path
    # last path component, ignoring any trailing slash; empty when the
    # URL has no usable path
    return os.path.basename(os.path.normpath(path)) if path else ''
222
222
def sharedreposource(repo):
    """Returns repository object for source repository of a shared repo.

    If repo is not a shared repository, returns None.
    """
    if repo.sharedpath == repo.path:
        # store lives in the repo itself: not shared
        return None

    if util.safehasattr(repo, 'srcrepo') and repo.srcrepo:
        # cached by a previous call
        return repo.srcrepo

    # the sharedpath always ends in the .hg; we want the path to the repo
    source = repo.vfs.split(repo.sharedpath)[0]
    srcurl, branches = parseurl(source)
    srcrepo = repository(repo.ui, srcurl)
    # cache on the repo object for subsequent lookups
    repo.srcrepo = srcrepo
    return srcrepo
240
240
def share(ui, source, dest=None, update=True, bookmarks=True, defaultpath=None,
          relative=False):
    '''create a shared repository

    ``source`` is a local repository object or path/URL.  ``dest``
    defaults to the basename of the source.  ``bookmarks`` controls
    whether bookmarks are shared; ``relative`` stores the share path
    relative to the new repo.  Returns the new repository object.
    '''

    if not islocal(source):
        raise error.Abort(_('can only share local repositories'))

    if not dest:
        dest = defaultdest(source)
    else:
        dest = ui.expandpath(dest)

    if isinstance(source, bytes):
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource)
        srcrepo = repository(ui, source)
        rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
    else:
        srcrepo = source.local()
        checkout = None

    shareditems = set()
    if bookmarks:
        shareditems.add(sharedbookmarks)

    r = repository(ui, dest, create=True, createopts={
        'sharedrepo': srcrepo,
        'sharedrelative': relative,
        'shareditems': shareditems,
    })

    postshare(srcrepo, r, defaultpath=defaultpath)
    # reopen so state written by postshare is picked up
    r = repository(ui, dest)
    _postshareupdate(r, update, checkout=checkout)
    return r
276
276
def unshare(ui, repo):
    """convert a shared repository to a normal one

    Copy the store data to the repo and remove the sharedpath data.

    Returns a new repository object representing the unshared repository.

    The passed repository object is not usable after this function is
    called.
    """

    destlock = lock = None
    lock = repo.lock()
    try:
        # we use locks here because if we race with commit, we
        # can end up with extra data in the cloned revlogs that's
        # not pointed to by changesets, thus causing verify to
        # fail

        destlock = copystore(ui, repo, repo.path)

        # keep the old pointer around for forensics rather than deleting it
        sharefile = repo.vfs.join('sharedpath')
        util.rename(sharefile, sharefile + '.old')

        repo.requirements.discard('shared')
        repo.requirements.discard('relshared')
        repo._writerequirements()
    finally:
        destlock and destlock.release()
        lock and lock.release()

    # Removing share changes some fundamental properties of the repo instance.
    # So we instantiate a new repo object and operate on it rather than
    # try to keep the existing repo usable.
    newrepo = repository(repo.baseui, repo.root, create=False)

    # TODO: figure out how to access subrepos that exist, but were previously
    # removed from .hgsub
    c = newrepo['.']
    subs = c.substate
    for s in sorted(subs):
        c.sub(s).unshare()

    # make any further use of the old repo object fail loudly
    localrepo.poisonrepository(repo)

    return newrepo
323
323
def postshare(sourcerepo, destrepo, defaultpath=None):
    """Called after a new shared repo is created.

    The new repo only has a requirements file and pointer to the source.
    This function configures additional shared data.

    Extensions can wrap this function and write additional entries to
    destrepo/.hg/shared to indicate additional pieces of data to be shared.
    """
    default = defaultpath or sourcerepo.ui.config('paths', 'default')
    if not default:
        return
    # record where pulls/pushes should go by default
    hgrc = ('[paths]\n'
            'default = %s\n') % default
    destrepo.vfs.write('hgrc', util.tonativeeol(hgrc))
338
338
def _postshareupdate(repo, update, checkout=None):
    """Maybe perform a working directory update after a shared repo is created.

    ``update`` can be a boolean or a revision to update to.
    """
    if not update:
        return

    repo.ui.status(_("updating working directory\n"))
    if update is not True:
        # an explicit revision was requested
        checkout = update
    # try the requested revision first, then fall back to default/tip
    for test in (checkout, 'default', 'tip'):
        if test is None:
            continue
        try:
            uprev = repo.lookup(test)
            break
        except error.RepoLookupError:
            continue
    _update(repo, uprev)
359
359
def copystore(ui, srcrepo, destpath):
    '''copy files from store of srcrepo in destpath

    returns destlock
    '''
    destlock = None
    try:
        # util.copyfiles decides link-vs-copy; None means "try hardlink"
        hardlink = None
        topic = _('linking') if hardlink else _('copying')
        with ui.makeprogress(topic, unit=_('files')) as progress:
            num = 0
            srcpublishing = srcrepo.publishing()
            srcvfs = vfsmod.vfs(srcrepo.sharedpath)
            dstvfs = vfsmod.vfs(destpath)
            for f in srcrepo.store.copylist():
                # phase data of a publishing repo is implicit; don't copy
                if srcpublishing and f.endswith('phaseroots'):
                    continue
                dstbase = os.path.dirname(f)
                if dstbase and not dstvfs.exists(dstbase):
                    dstvfs.mkdir(dstbase)
                if srcvfs.exists(f):
                    if f.endswith('data'):
                        # 'dstbase' may be empty (e.g. revlog format 0)
                        lockfile = os.path.join(dstbase, "lock")
                        # lock to avoid premature writing to the target
                        destlock = lock.lock(dstvfs, lockfile)
                    hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f),
                                                 hardlink, progress)
                    num += n
        if hardlink:
            ui.debug("linked %d files\n" % num)
        else:
            ui.debug("copied %d files\n" % num)
        return destlock
    except: # re-raises
        release(destlock)
        raise
397
397
def clonewithshare(ui, peeropts, sharepath, source, srcpeer, dest, pull=False,
                   rev=None, update=True, stream=False):
    """Perform a clone using a shared repo.

    The store for the repository will be located at <sharepath>/.hg. The
    specified revisions will be cloned or pulled from "source". A shared repo
    will be created at "dest" and a working copy will be created if "update" is
    True.
    """
    revs = None
    if rev:
        if not srcpeer.capable('lookup'):
            raise error.Abort(_("src repository does not support "
                                "revision lookup and so doesn't "
                                "support clone by revision"))

        # TODO this is batchable.
        remoterevs = []
        for r in rev:
            with srcpeer.commandexecutor() as e:
                remoterevs.append(e.callcommand('lookup', {
                    'key': r,
                }).result())
        revs = remoterevs

    # Obtain a lock before checking for or cloning the pooled repo otherwise
    # 2 clients may race creating or populating it.
    pooldir = os.path.dirname(sharepath)
    # lock class requires the directory to exist.
    try:
        util.makedir(pooldir, False)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    poolvfs = vfsmod.vfs(pooldir)
    basename = os.path.basename(sharepath)

    with lock.lock(poolvfs, '%s.lock' % basename):
        if os.path.exists(sharepath):
            ui.status(_('(sharing from existing pooled repository %s)\n') %
                      basename)
        else:
            ui.status(_('(sharing from new pooled repository %s)\n') % basename)
            # Always use pull mode because hardlinks in share mode don't work
            # well. Never update because working copies aren't necessary in
            # share mode.
            clone(ui, peeropts, source, dest=sharepath, pull=True,
                  revs=rev, update=False, stream=stream)

    # Resolve the value to put in [paths] section for the source.
    if islocal(source):
        defaultpath = os.path.abspath(util.urllocalpath(source))
    else:
        defaultpath = source

    sharerepo = repository(ui, path=sharepath)
    destrepo = share(ui, sharerepo, dest=dest, update=False, bookmarks=False,
                     defaultpath=defaultpath)

    # We need to perform a pull against the dest repo to fetch bookmarks
    # and other non-store data that isn't shared by default. In the case of
    # non-existing shared repo, this means we pull from the remote twice. This
    # is a bit weird. But at the time it was implemented, there wasn't an easy
    # way to pull just non-changegroup data.
    exchange.pull(destrepo, srcpeer, heads=revs)

    _postshareupdate(destrepo, update)

    return srcpeer, peer(ui, peeropts, dest)
468
468
# Recomputing branch cache might be slow on big repos,
# so just copy it
def _copycache(srcrepo, dstcachedir, fname):
    """copy a cache from srcrepo to destcachedir (if it exists)"""
    srccachepath = srcrepo.vfs.join('cache/%s' % fname)
    if not os.path.exists(srccachepath):
        return
    if not os.path.exists(dstcachedir):
        os.mkdir(dstcachedir)
    util.copyfile(srccachepath, os.path.join(dstcachedir, fname))
479
479
480 def clone(ui, peeropts, source, dest=None, pull=False, revs=None,
480 def clone(ui, peeropts, source, dest=None, pull=False, revs=None,
481 update=True, stream=False, branch=None, shareopts=None,
481 update=True, stream=False, branch=None, shareopts=None,
482 storeincludepats=None, storeexcludepats=None, depth=None):
482 storeincludepats=None, storeexcludepats=None, depth=None):
483 """Make a copy of an existing repository.
483 """Make a copy of an existing repository.
484
484
485 Create a copy of an existing repository in a new directory. The
485 Create a copy of an existing repository in a new directory. The
486 source and destination are URLs, as passed to the repository
486 source and destination are URLs, as passed to the repository
487 function. Returns a pair of repository peers, the source and
487 function. Returns a pair of repository peers, the source and
488 newly created destination.
488 newly created destination.
489
489
490 The location of the source is added to the new repository's
490 The location of the source is added to the new repository's
491 .hg/hgrc file, as the default to be used for future pulls and
491 .hg/hgrc file, as the default to be used for future pulls and
492 pushes.
492 pushes.
493
493
494 If an exception is raised, the partly cloned/updated destination
494 If an exception is raised, the partly cloned/updated destination
495 repository will be deleted.
495 repository will be deleted.
496
496
497 Arguments:
497 Arguments:
498
498
499 source: repository object or URL
499 source: repository object or URL
500
500
501 dest: URL of destination repository to create (defaults to base
501 dest: URL of destination repository to create (defaults to base
502 name of source repository)
502 name of source repository)
503
503
504 pull: always pull from source repository, even in local case or if the
504 pull: always pull from source repository, even in local case or if the
505 server prefers streaming
505 server prefers streaming
506
506
507 stream: stream raw data uncompressed from repository (fast over
507 stream: stream raw data uncompressed from repository (fast over
508 LAN, slow over WAN)
508 LAN, slow over WAN)
509
509
510 revs: revision to clone up to (implies pull=True)
510 revs: revision to clone up to (implies pull=True)
511
511
512 update: update working directory after clone completes, if
512 update: update working directory after clone completes, if
513 destination is local repository (True means update to default rev,
513 destination is local repository (True means update to default rev,
514 anything else is treated as a revision)
514 anything else is treated as a revision)
515
515
516 branch: branches to clone
516 branch: branches to clone
517
517
518 shareopts: dict of options to control auto sharing behavior. The "pool" key
518 shareopts: dict of options to control auto sharing behavior. The "pool" key
519 activates auto sharing mode and defines the directory for stores. The
519 activates auto sharing mode and defines the directory for stores. The
520 "mode" key determines how to construct the directory name of the shared
520 "mode" key determines how to construct the directory name of the shared
521 repository. "identity" means the name is derived from the node of the first
521 repository. "identity" means the name is derived from the node of the first
522 changeset in the repository. "remote" means the name is derived from the
522 changeset in the repository. "remote" means the name is derived from the
523 remote's path/URL. Defaults to "identity."
523 remote's path/URL. Defaults to "identity."
524
524
525 storeincludepats and storeexcludepats: sets of file patterns to include and
525 storeincludepats and storeexcludepats: sets of file patterns to include and
526 exclude in the repository copy, respectively. If not defined, all files
526 exclude in the repository copy, respectively. If not defined, all files
527 will be included (a "full" clone). Otherwise a "narrow" clone containing
527 will be included (a "full" clone). Otherwise a "narrow" clone containing
528 only the requested files will be performed. If ``storeincludepats`` is not
528 only the requested files will be performed. If ``storeincludepats`` is not
529 defined but ``storeexcludepats`` is, ``storeincludepats`` is assumed to be
529 defined but ``storeexcludepats`` is, ``storeincludepats`` is assumed to be
530 ``path:.``. If both are empty sets, no files will be cloned.
530 ``path:.``. If both are empty sets, no files will be cloned.
531 """
531 """
532
532
533 if isinstance(source, bytes):
533 if isinstance(source, bytes):
534 origsource = ui.expandpath(source)
534 origsource = ui.expandpath(source)
535 source, branches = parseurl(origsource, branch)
535 source, branches = parseurl(origsource, branch)
536 srcpeer = peer(ui, peeropts, source)
536 srcpeer = peer(ui, peeropts, source)
537 else:
537 else:
538 srcpeer = source.peer() # in case we were called with a localrepo
538 srcpeer = source.peer() # in case we were called with a localrepo
539 branches = (None, branch or [])
539 branches = (None, branch or [])
540 origsource = source = srcpeer.url()
540 origsource = source = srcpeer.url()
541 revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs)
541 revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs)
542
542
543 if dest is None:
543 if dest is None:
544 dest = defaultdest(source)
544 dest = defaultdest(source)
545 if dest:
545 if dest:
546 ui.status(_("destination directory: %s\n") % dest)
546 ui.status(_("destination directory: %s\n") % dest)
547 else:
547 else:
548 dest = ui.expandpath(dest)
548 dest = ui.expandpath(dest)
549
549
550 dest = util.urllocalpath(dest)
550 dest = util.urllocalpath(dest)
551 source = util.urllocalpath(source)
551 source = util.urllocalpath(source)
552
552
553 if not dest:
553 if not dest:
554 raise error.Abort(_("empty destination path is not valid"))
554 raise error.Abort(_("empty destination path is not valid"))
555
555
556 destvfs = vfsmod.vfs(dest, expandpath=True)
556 destvfs = vfsmod.vfs(dest, expandpath=True)
557 if destvfs.lexists():
557 if destvfs.lexists():
558 if not destvfs.isdir():
558 if not destvfs.isdir():
559 raise error.Abort(_("destination '%s' already exists") % dest)
559 raise error.Abort(_("destination '%s' already exists") % dest)
560 elif destvfs.listdir():
560 elif destvfs.listdir():
561 raise error.Abort(_("destination '%s' is not empty") % dest)
561 raise error.Abort(_("destination '%s' is not empty") % dest)
562
562
563 createopts = {}
563 createopts = {}
564 narrow = False
564 narrow = False
565
565
566 if storeincludepats is not None:
566 if storeincludepats is not None:
567 narrowspec.validatepatterns(storeincludepats)
567 narrowspec.validatepatterns(storeincludepats)
568 narrow = True
568 narrow = True
569
569
570 if storeexcludepats is not None:
570 if storeexcludepats is not None:
571 narrowspec.validatepatterns(storeexcludepats)
571 narrowspec.validatepatterns(storeexcludepats)
572 narrow = True
572 narrow = True
573
573
574 if narrow:
574 if narrow:
575 # Include everything by default if only exclusion patterns defined.
575 # Include everything by default if only exclusion patterns defined.
576 if storeexcludepats and not storeincludepats:
576 if storeexcludepats and not storeincludepats:
577 storeincludepats = {'path:.'}
577 storeincludepats = {'path:.'}
578
578
579 createopts['narrowfiles'] = True
579 createopts['narrowfiles'] = True
580
580
581 if depth:
581 if depth:
582 createopts['shallowfilestore'] = True
582 createopts['shallowfilestore'] = True
583
583
584 if srcpeer.capable(b'lfs-serve'):
584 if srcpeer.capable(b'lfs-serve'):
585 # Repository creation honors the config if it disabled the extension, so
585 # Repository creation honors the config if it disabled the extension, so
586 # we can't just announce that lfs will be enabled. This check avoids
586 # we can't just announce that lfs will be enabled. This check avoids
587 # saying that lfs will be enabled, and then saying it's an unknown
587 # saying that lfs will be enabled, and then saying it's an unknown
588 # feature. The lfs creation option is set in either case so that a
588 # feature. The lfs creation option is set in either case so that a
589 # requirement is added. If the extension is explicitly disabled but the
589 # requirement is added. If the extension is explicitly disabled but the
590 # requirement is set, the clone aborts early, before transferring any
590 # requirement is set, the clone aborts early, before transferring any
591 # data.
591 # data.
592 createopts['lfs'] = True
592 createopts['lfs'] = True
593
593
594 if extensions.disabledext('lfs'):
594 if extensions.disabledext('lfs'):
595 ui.status(_('(remote is using large file support (lfs), but it is '
595 ui.status(_('(remote is using large file support (lfs), but it is '
596 'explicitly disabled in the local configuration)\n'))
596 'explicitly disabled in the local configuration)\n'))
597 else:
597 else:
598 ui.status(_('(remote is using large file support (lfs); lfs will '
598 ui.status(_('(remote is using large file support (lfs); lfs will '
599 'be enabled for this repository)\n'))
599 'be enabled for this repository)\n'))
600
600
601 shareopts = shareopts or {}
601 shareopts = shareopts or {}
602 sharepool = shareopts.get('pool')
602 sharepool = shareopts.get('pool')
603 sharenamemode = shareopts.get('mode')
603 sharenamemode = shareopts.get('mode')
604 if sharepool and islocal(dest):
604 if sharepool and islocal(dest):
605 sharepath = None
605 sharepath = None
606 if sharenamemode == 'identity':
606 if sharenamemode == 'identity':
607 # Resolve the name from the initial changeset in the remote
607 # Resolve the name from the initial changeset in the remote
608 # repository. This returns nullid when the remote is empty. It
608 # repository. This returns nullid when the remote is empty. It
609 # raises RepoLookupError if revision 0 is filtered or otherwise
609 # raises RepoLookupError if revision 0 is filtered or otherwise
610 # not available. If we fail to resolve, sharing is not enabled.
610 # not available. If we fail to resolve, sharing is not enabled.
611 try:
611 try:
612 with srcpeer.commandexecutor() as e:
612 with srcpeer.commandexecutor() as e:
613 rootnode = e.callcommand('lookup', {
613 rootnode = e.callcommand('lookup', {
614 'key': '0',
614 'key': '0',
615 }).result()
615 }).result()
616
616
617 if rootnode != node.nullid:
617 if rootnode != node.nullid:
618 sharepath = os.path.join(sharepool, node.hex(rootnode))
618 sharepath = os.path.join(sharepool, node.hex(rootnode))
619 else:
619 else:
620 ui.status(_('(not using pooled storage: '
620 ui.status(_('(not using pooled storage: '
621 'remote appears to be empty)\n'))
621 'remote appears to be empty)\n'))
622 except error.RepoLookupError:
622 except error.RepoLookupError:
623 ui.status(_('(not using pooled storage: '
623 ui.status(_('(not using pooled storage: '
624 'unable to resolve identity of remote)\n'))
624 'unable to resolve identity of remote)\n'))
625 elif sharenamemode == 'remote':
625 elif sharenamemode == 'remote':
626 sharepath = os.path.join(
626 sharepath = os.path.join(
627 sharepool, node.hex(hashlib.sha1(source).digest()))
627 sharepool, node.hex(hashlib.sha1(source).digest()))
628 else:
628 else:
629 raise error.Abort(_('unknown share naming mode: %s') %
629 raise error.Abort(_('unknown share naming mode: %s') %
630 sharenamemode)
630 sharenamemode)
631
631
632 # TODO this is a somewhat arbitrary restriction.
632 # TODO this is a somewhat arbitrary restriction.
633 if narrow:
633 if narrow:
634 ui.status(_('(pooled storage not supported for narrow clones)\n'))
634 ui.status(_('(pooled storage not supported for narrow clones)\n'))
635 sharepath = None
635 sharepath = None
636
636
637 if sharepath:
637 if sharepath:
638 return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
638 return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
639 dest, pull=pull, rev=revs, update=update,
639 dest, pull=pull, rev=revs, update=update,
640 stream=stream)
640 stream=stream)
641
641
642 srclock = destlock = cleandir = None
642 srclock = destlock = cleandir = None
643 srcrepo = srcpeer.local()
643 srcrepo = srcpeer.local()
644 try:
644 try:
645 abspath = origsource
645 abspath = origsource
646 if islocal(origsource):
646 if islocal(origsource):
647 abspath = os.path.abspath(util.urllocalpath(origsource))
647 abspath = os.path.abspath(util.urllocalpath(origsource))
648
648
649 if islocal(dest):
649 if islocal(dest):
650 cleandir = dest
650 cleandir = dest
651
651
652 copy = False
652 copy = False
653 if (srcrepo and srcrepo.cancopy() and islocal(dest)
653 if (srcrepo and srcrepo.cancopy() and islocal(dest)
654 and not phases.hassecret(srcrepo)):
654 and not phases.hassecret(srcrepo)):
655 copy = not pull and not revs
655 copy = not pull and not revs
656
656
657 # TODO this is a somewhat arbitrary restriction.
657 # TODO this is a somewhat arbitrary restriction.
658 if narrow:
658 if narrow:
659 copy = False
659 copy = False
660
660
661 if copy:
661 if copy:
662 try:
662 try:
663 # we use a lock here because if we race with commit, we
663 # we use a lock here because if we race with commit, we
664 # can end up with extra data in the cloned revlogs that's
664 # can end up with extra data in the cloned revlogs that's
665 # not pointed to by changesets, thus causing verify to
665 # not pointed to by changesets, thus causing verify to
666 # fail
666 # fail
667 srclock = srcrepo.lock(wait=False)
667 srclock = srcrepo.lock(wait=False)
668 except error.LockError:
668 except error.LockError:
669 copy = False
669 copy = False
670
670
671 if copy:
671 if copy:
672 srcrepo.hook('preoutgoing', throw=True, source='clone')
672 srcrepo.hook('preoutgoing', throw=True, source='clone')
673 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
673 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
674 if not os.path.exists(dest):
674 if not os.path.exists(dest):
675 util.makedirs(dest)
675 util.makedirs(dest)
676 else:
676 else:
677 # only clean up directories we create ourselves
677 # only clean up directories we create ourselves
678 cleandir = hgdir
678 cleandir = hgdir
679 try:
679 try:
680 destpath = hgdir
680 destpath = hgdir
681 util.makedir(destpath, notindexed=True)
681 util.makedir(destpath, notindexed=True)
682 except OSError as inst:
682 except OSError as inst:
683 if inst.errno == errno.EEXIST:
683 if inst.errno == errno.EEXIST:
684 cleandir = None
684 cleandir = None
685 raise error.Abort(_("destination '%s' already exists")
685 raise error.Abort(_("destination '%s' already exists")
686 % dest)
686 % dest)
687 raise
687 raise
688
688
689 destlock = copystore(ui, srcrepo, destpath)
689 destlock = copystore(ui, srcrepo, destpath)
690 # copy bookmarks over
690 # copy bookmarks over
691 srcbookmarks = srcrepo.vfs.join('bookmarks')
691 srcbookmarks = srcrepo.vfs.join('bookmarks')
692 dstbookmarks = os.path.join(destpath, 'bookmarks')
692 dstbookmarks = os.path.join(destpath, 'bookmarks')
693 if os.path.exists(srcbookmarks):
693 if os.path.exists(srcbookmarks):
694 util.copyfile(srcbookmarks, dstbookmarks)
694 util.copyfile(srcbookmarks, dstbookmarks)
695
695
696 dstcachedir = os.path.join(destpath, 'cache')
696 dstcachedir = os.path.join(destpath, 'cache')
697 for cache in cacheutil.cachetocopy(srcrepo):
697 for cache in cacheutil.cachetocopy(srcrepo):
698 _copycache(srcrepo, dstcachedir, cache)
698 _copycache(srcrepo, dstcachedir, cache)
699
699
700 # we need to re-init the repo after manually copying the data
700 # we need to re-init the repo after manually copying the data
701 # into it
701 # into it
702 destpeer = peer(srcrepo, peeropts, dest)
702 destpeer = peer(srcrepo, peeropts, dest)
703 srcrepo.hook('outgoing', source='clone',
703 srcrepo.hook('outgoing', source='clone',
704 node=node.hex(node.nullid))
704 node=node.hex(node.nullid))
705 else:
705 else:
706 try:
706 try:
707 # only pass ui when no srcrepo
707 # only pass ui when no srcrepo
708 destpeer = peer(srcrepo or ui, peeropts, dest, create=True,
708 destpeer = peer(srcrepo or ui, peeropts, dest, create=True,
709 createopts=createopts)
709 createopts=createopts)
710 except OSError as inst:
710 except OSError as inst:
711 if inst.errno == errno.EEXIST:
711 if inst.errno == errno.EEXIST:
712 cleandir = None
712 cleandir = None
713 raise error.Abort(_("destination '%s' already exists")
713 raise error.Abort(_("destination '%s' already exists")
714 % dest)
714 % dest)
715 raise
715 raise
716
716
717 if revs:
717 if revs:
718 if not srcpeer.capable('lookup'):
718 if not srcpeer.capable('lookup'):
719 raise error.Abort(_("src repository does not support "
719 raise error.Abort(_("src repository does not support "
720 "revision lookup and so doesn't "
720 "revision lookup and so doesn't "
721 "support clone by revision"))
721 "support clone by revision"))
722
722
723 # TODO this is batchable.
723 # TODO this is batchable.
724 remoterevs = []
724 remoterevs = []
725 for rev in revs:
725 for rev in revs:
726 with srcpeer.commandexecutor() as e:
726 with srcpeer.commandexecutor() as e:
727 remoterevs.append(e.callcommand('lookup', {
727 remoterevs.append(e.callcommand('lookup', {
728 'key': rev,
728 'key': rev,
729 }).result())
729 }).result())
730 revs = remoterevs
730 revs = remoterevs
731
731
732 checkout = revs[0]
732 checkout = revs[0]
733 else:
733 else:
734 revs = None
734 revs = None
735 local = destpeer.local()
735 local = destpeer.local()
736 if local:
736 if local:
737 if narrow:
737 if narrow:
738 with local.lock():
738 with local.lock():
739 local.setnarrowpats(storeincludepats, storeexcludepats)
739 local.setnarrowpats(storeincludepats, storeexcludepats)
740
740
741 u = util.url(abspath)
741 u = util.url(abspath)
742 defaulturl = bytes(u)
742 defaulturl = bytes(u)
743 local.ui.setconfig('paths', 'default', defaulturl, 'clone')
743 local.ui.setconfig('paths', 'default', defaulturl, 'clone')
744 if not stream:
744 if not stream:
745 if pull:
745 if pull:
746 stream = False
746 stream = False
747 else:
747 else:
748 stream = None
748 stream = None
749 # internal config: ui.quietbookmarkmove
749 # internal config: ui.quietbookmarkmove
750 overrides = {('ui', 'quietbookmarkmove'): True}
750 overrides = {('ui', 'quietbookmarkmove'): True}
751 with local.ui.configoverride(overrides, 'clone'):
751 with local.ui.configoverride(overrides, 'clone'):
752 exchange.pull(local, srcpeer, revs,
752 exchange.pull(local, srcpeer, revs,
753 streamclonerequested=stream,
753 streamclonerequested=stream,
754 includepats=storeincludepats,
754 includepats=storeincludepats,
755 excludepats=storeexcludepats,
755 excludepats=storeexcludepats,
756 depth=depth)
756 depth=depth)
757 elif srcrepo:
757 elif srcrepo:
758 # TODO lift restriction once exchange.push() accepts narrow
758 # TODO lift restriction once exchange.push() accepts narrow
759 # push.
759 # push.
760 if narrow:
760 if narrow:
761 raise error.Abort(_('narrow clone not available for '
761 raise error.Abort(_('narrow clone not available for '
762 'remote destinations'))
762 'remote destinations'))
763
763
764 exchange.push(srcrepo, destpeer, revs=revs,
764 exchange.push(srcrepo, destpeer, revs=revs,
765 bookmarks=srcrepo._bookmarks.keys())
765 bookmarks=srcrepo._bookmarks.keys())
766 else:
766 else:
767 raise error.Abort(_("clone from remote to remote not supported")
767 raise error.Abort(_("clone from remote to remote not supported")
768 )
768 )
769
769
770 cleandir = None
770 cleandir = None
771
771
772 destrepo = destpeer.local()
772 destrepo = destpeer.local()
773 if destrepo:
773 if destrepo:
774 template = uimod.samplehgrcs['cloned']
774 template = uimod.samplehgrcs['cloned']
775 u = util.url(abspath)
775 u = util.url(abspath)
776 u.passwd = None
776 u.passwd = None
777 defaulturl = bytes(u)
777 defaulturl = bytes(u)
778 destrepo.vfs.write('hgrc', util.tonativeeol(template % defaulturl))
778 destrepo.vfs.write('hgrc', util.tonativeeol(template % defaulturl))
779 destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
779 destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
780
780
781 if ui.configbool('experimental', 'remotenames'):
781 if ui.configbool('experimental', 'remotenames'):
782 logexchange.pullremotenames(destrepo, srcpeer)
782 logexchange.pullremotenames(destrepo, srcpeer)
783
783
784 if update:
784 if update:
785 if update is not True:
785 if update is not True:
786 with srcpeer.commandexecutor() as e:
786 with srcpeer.commandexecutor() as e:
787 checkout = e.callcommand('lookup', {
787 checkout = e.callcommand('lookup', {
788 'key': update,
788 'key': update,
789 }).result()
789 }).result()
790
790
791 uprev = None
791 uprev = None
792 status = None
792 status = None
793 if checkout is not None:
793 if checkout is not None:
794 # Some extensions (at least hg-git and hg-subversion) have
794 # Some extensions (at least hg-git and hg-subversion) have
795 # a peer.lookup() implementation that returns a name instead
795 # a peer.lookup() implementation that returns a name instead
796 # of a nodeid. We work around it here until we've figured
796 # of a nodeid. We work around it here until we've figured
797 # out a better solution.
797 # out a better solution.
798 if len(checkout) == 20 and checkout in destrepo:
798 if len(checkout) == 20 and checkout in destrepo:
799 uprev = checkout
799 uprev = checkout
800 elif scmutil.isrevsymbol(destrepo, checkout):
800 elif scmutil.isrevsymbol(destrepo, checkout):
801 uprev = scmutil.revsymbol(destrepo, checkout).node()
801 uprev = scmutil.revsymbol(destrepo, checkout).node()
802 else:
802 else:
803 if update is not True:
803 if update is not True:
804 try:
804 try:
805 uprev = destrepo.lookup(update)
805 uprev = destrepo.lookup(update)
806 except error.RepoLookupError:
806 except error.RepoLookupError:
807 pass
807 pass
808 if uprev is None:
808 if uprev is None:
809 try:
809 try:
810 uprev = destrepo._bookmarks['@']
810 uprev = destrepo._bookmarks['@']
811 update = '@'
811 update = '@'
812 bn = destrepo[uprev].branch()
812 bn = destrepo[uprev].branch()
813 if bn == 'default':
813 if bn == 'default':
814 status = _("updating to bookmark @\n")
814 status = _("updating to bookmark @\n")
815 else:
815 else:
816 status = (_("updating to bookmark @ on branch %s\n")
816 status = (_("updating to bookmark @ on branch %s\n")
817 % bn)
817 % bn)
818 except KeyError:
818 except KeyError:
819 try:
819 try:
820 uprev = destrepo.branchtip('default')
820 uprev = destrepo.branchtip('default')
821 except error.RepoLookupError:
821 except error.RepoLookupError:
822 uprev = destrepo.lookup('tip')
822 uprev = destrepo.lookup('tip')
823 if not status:
823 if not status:
824 bn = destrepo[uprev].branch()
824 bn = destrepo[uprev].branch()
825 status = _("updating to branch %s\n") % bn
825 status = _("updating to branch %s\n") % bn
826 destrepo.ui.status(status)
826 destrepo.ui.status(status)
827 _update(destrepo, uprev)
827 _update(destrepo, uprev)
828 if update in destrepo._bookmarks:
828 if update in destrepo._bookmarks:
829 bookmarks.activate(destrepo, update)
829 bookmarks.activate(destrepo, update)
830 finally:
830 finally:
831 release(srclock, destlock)
831 release(srclock, destlock)
832 if cleandir is not None:
832 if cleandir is not None:
833 shutil.rmtree(cleandir, True)
833 shutil.rmtree(cleandir, True)
834 if srcpeer is not None:
834 if srcpeer is not None:
835 srcpeer.close()
835 srcpeer.close()
836 return srcpeer, destpeer
836 return srcpeer, destpeer
837
837
def _showstats(repo, stats, quietempty=False):
    """Print a one-line summary of update statistics to the repo's ui.

    When quietempty is true and the stats object reports no activity at
    all, nothing is printed.
    """
    if quietempty and stats.isempty():
        return
    counts = (stats.updatedcount, stats.mergedcount,
              stats.removedcount, stats.unresolvedcount)
    msg = _("%d files updated, %d files merged, "
            "%d files removed, %d files unresolved\n")
    repo.ui.status(msg % counts)
845
845
def updaterepo(repo, node, overwrite, updatecheck=None):
    """Update the working directory to node.

    If ``overwrite`` is true, uncommitted changes are clobbered;
    otherwise they are merged into the destination.

    Returns stats (see pydoc mercurial.merge.applyupdates).
    """
    stats = mergemod.update(repo, node, branchmerge=False, force=overwrite,
                            labels=['working copy', 'destination'],
                            updatecheck=updatecheck)
    return stats
855
855
def update(repo, node, quietempty=False, updatecheck=None):
    """update the working directory to node

    Returns True if any file merges remained unresolved.
    """
    stats = updaterepo(repo, node, False, updatecheck=updatecheck)
    _showstats(repo, stats, quietempty)
    unresolved = stats.unresolvedcount
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
    return unresolved > 0
863
863
# naming conflict in clone(): a local name there shadows 'update', so keep
# a module-level alias for internal callers
_update = update
866
866
def clean(repo, node, show_stats=True, quietempty=False):
    """forcibly switch the working directory to node, clobbering changes

    Any interrupted graft state is discarded as well, since an
    overwriting update renders it meaningless.
    """
    stats = updaterepo(repo, node, True)
    repo.vfs.unlinkpath('graftstate', ignoremissing=True)
    if show_stats:
        _showstats(repo, stats, quietempty)
    return stats.unresolvedcount > 0
874
874
# naming conflict in updatetotally(): a parameter there shadows 'clean', so
# keep a module-level alias for internal callers
_clean = clean
877
877
def updatetotally(ui, repo, checkout, brev, clean=False, updatecheck=None):
    """Update the working directory with extra care for non-file components

    This takes care of non-file components below:

    :bookmark: might be advanced or (in)activated

    This takes arguments below:

    :checkout: to which revision the working directory is updated
    :brev: a name, which might be a bookmark to be activated after updating
    :clean: whether changes in the working directory can be discarded
    :updatecheck: how to deal with a dirty working directory

    Valid values for updatecheck are (None => linear):

    * abort: abort if the working directory is dirty
    * none: don't check (merge working directory changes into destination)
    * linear: check that update is linear before merging working directory
              changes into destination
    * noconflict: check that the update does not result in file merges

    This returns whether conflict is detected at updating or not.
    """
    if updatecheck is None:
        updatecheck = ui.config('commands', 'update.check')
        if updatecheck not in ('abort', 'none', 'linear', 'noconflict'):
            # If not configured, or invalid value configured
            updatecheck = 'linear'
    with repo.wlock():
        movemarkfrom = None
        warndest = False
        if checkout is None:
            # no explicit destination: let destutil pick one (and possibly
            # a bookmark to move); only then is it worth mentioning other
            # interesting destinations afterwards
            updata = destutil.destupdate(repo, clean=clean)
            checkout, movemarkfrom, brev = updata
            warndest = True

        if clean:
            ret = _clean(repo, checkout)
        else:
            if updatecheck == 'abort':
                # bail out on a dirty working directory up front, then do
                # the actual update without further checking
                cmdutil.bailifchanged(repo, merge=False)
                updatecheck = 'none'
            ret = _update(repo, checkout, updatecheck=updatecheck)

        if not ret and movemarkfrom:
            if movemarkfrom == repo['.'].node():
                pass # no-op update
            elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
                b = ui.label(repo._activebookmark, 'bookmarks.active')
                ui.status(_("updating bookmark %s\n") % b)
            else:
                # this can happen with a non-linear update
                b = ui.label(repo._activebookmark, 'bookmarks')
                ui.status(_("(leaving bookmark %s)\n") % b)
                bookmarks.deactivate(repo)
        elif brev in repo._bookmarks:
            if brev != repo._activebookmark:
                b = ui.label(brev, 'bookmarks.active')
                ui.status(_("(activating bookmark %s)\n") % b)
            bookmarks.activate(repo, brev)
        elif brev:
            # brev names something other than a bookmark (e.g. a branch or
            # revision); drop any currently active bookmark
            if repo._activebookmark:
                b = ui.label(repo._activebookmark, 'bookmarks')
                ui.status(_("(leaving bookmark %s)\n") % b)
            bookmarks.deactivate(repo)

        if warndest:
            destutil.statusotherdests(ui, repo)

    return ret
949
949
def merge(repo, node, force=None, remind=True, mergeforce=False, labels=None,
          abort=False):
    """Branch merge with node, resolving changes. Return true if any
    unresolved conflicts.

    With ``abort=True``, an in-progress merge is abandoned instead: the
    working directory is forcibly updated back to the pre-merge parent.
    """
    if not abort:
        stats = mergemod.update(repo, node, branchmerge=True, force=force,
                                mergeforce=mergeforce, labels=labels)
    else:
        # aborting: figure out which revision to update back to
        ms = mergemod.mergestate.read(repo)
        if ms.active():
            # there were conflicts
            node = ms.localctx.hex()
        else:
            # there were no conflicts, mergestate was not stored
            node = repo['.'].hex()

        repo.ui.status(_("aborting the merge, updating back to"
                         " %s\n") % node[:12])
        # forced non-merge update discards the merge in progress
        stats = mergemod.update(repo, node, branchmerge=False, force=True,
                                labels=labels)

    _showstats(repo, stats)
    if stats.unresolvedcount:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                         "or 'hg merge --abort' to abandon\n"))
    elif remind and not abort:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return stats.unresolvedcount > 0
978
978
def _incoming(displaychlist, subreporecurse, ui, repo, source,
        opts, buffered=False):
    """
    Helper for incoming / gincoming.
    displaychlist gets called with
        (remoterepo, incomingchangesetlist, displayer) parameters,
    and is supposed to contain only code that can't be unified.

    Returns 0 when incoming changes were found and displayed; otherwise
    the return value of subreporecurse().
    """
    source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
    other = peer(repo, opts, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))

    if revs:
        revs = [other.lookup(rev) for rev in revs]
    # materialize the incoming changes (possibly via a temporary bundle
    # repo); cleanupfn must run afterwards to remove temporary artifacts
    other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
                                revs, opts["bundle"], opts["force"])
    try:
        if not chlist:
            ui.status(_("no changes found\n"))
            return subreporecurse()
        ui.pager('incoming')
        displayer = logcmdutil.changesetdisplayer(ui, other, opts,
                                                  buffered=buffered)
        displaychlist(other, chlist, displayer)
        displayer.close()
    finally:
        cleanupfn()
    subreporecurse()
    return 0 # exit code is zero since we found incoming changes
1009
1009
def incoming(ui, repo, source, opts):
    """show changesets that would be pulled from *source*

    Returns 0 if incoming changes were found, 1 otherwise (standard
    hg incoming exit-code convention).
    """
    def subreporecurse():
        # recurse into subrepos when --subrepos was given; 1 means
        # "no incoming changes anywhere"
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.incoming(ui, source, opts))
        return ret

    def display(other, chlist, displayer):
        # show up to --limit non-filtered changesets from the remote
        limit = logcmdutil.getlimit(opts)
        if opts.get('newest_first'):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
    return _incoming(display, subreporecurse, ui, repo, source, opts)
1034
1034
def _outgoing(ui, repo, dest, opts):
    """compute the changesets missing from *dest*

    Returns a tuple (missing, other) where *missing* is the list of
    outgoing nodes (possibly empty) and *other* is the peer for *dest*.
    Raises error.Abort when no destination is configured.
    """
    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    dest = path.pushloc or path.loc
    branches = path.branch, opts.get('branch') or []

    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
    if revs:
        revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)]

    other = peer(repo, opts, dest)
    outgoing = discovery.findcommonoutgoing(repo, other, revs,
                                            force=opts.get('force'))
    o = outgoing.missing
    if not o:
        scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
    return o, other
1055
1055
def outgoing(ui, repo, dest, opts):
    """show changesets that would be pushed to *dest*

    Returns 0 if outgoing changes were found, 1 otherwise.
    """
    def recurse():
        # recurse into subrepos when --subrepos was given; 1 means
        # "no outgoing changes anywhere"
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.outgoing(ui, dest, opts))
        return ret

    limit = logcmdutil.getlimit(opts)
    o, other = _outgoing(ui, repo, dest, opts)
    if not o:
        # run the hooks even when nothing is outgoing
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return recurse()

    if opts.get('newest_first'):
        o.reverse()
    ui.pager('outgoing')
    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    count = 0
    for n in o:
        if limit is not None and count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])
    displayer.close()
    cmdutil.outgoinghooks(ui, repo, other, opts, o)
    recurse()
    return 0 # exit code is zero since we found outgoing changes
1089
1089
def verify(repo):
    """verify the consistency of a repository

    Runs the core verifier, then additionally verifies any subrepos
    referenced by visible .hgsubstate revisions.  Returns the verifier's
    status (nonzero on integrity errors).
    """
    ret = verifymod.verify(repo)

    # Broken subrepo references in hidden csets don't seem worth worrying about,
    # since they can't be pushed/pulled, and --hidden can be used if they are a
    # concern.

    # pathto() is needed for -R case
    revs = repo.revs("filelog(%s)",
                     util.pathto(repo.root, repo.getcwd(), '.hgsubstate'))

    if revs:
        repo.ui.status(_('checking subrepo links\n'))
        for rev in revs:
            ctx = repo[rev]
            try:
                for subpath in ctx.substate:
                    try:
                        ret = (ctx.sub(subpath, allowcreate=False).verify()
                               or ret)
                    except error.RepoError as e:
                        repo.ui.warn(('%d: %s\n') % (rev, e))
            except Exception:
                # .hgsubstate parsing failed entirely for this revision
                repo.ui.warn(_('.hgsubstate is corrupt in revision %s\n') %
                             node.short(ctx.node()))

    return ret
1118
1118
def remoteui(src, opts):
    """build a remote ui from ui or repo and opts

    *src* may be a repository (its repo-specific config is dropped) or a
    plain ui object (all global options are kept).  Selected local
    settings that are safe/needed remotely (ssh, bundle, auth/TLS) are
    copied onto the returned ui.
    """
    if util.safehasattr(src, 'baseui'): # looks like a repository
        dst = src.baseui.copy() # drop repo-specific config
        src = src.ui # copy target options from repo
    else: # assume it's a global ui object
        dst = src.copy() # keep all global options

    # copy ssh-specific options
    for o in 'ssh', 'remotecmd':
        v = opts.get(o) or src.config('ui', o)
        if v:
            dst.setconfig("ui", o, v, 'copied')

    # copy bundle-specific options
    r = src.config('bundle', 'mainreporoot')
    if r:
        dst.setconfig('bundle', 'mainreporoot', r, 'copied')

    # copy selected local settings to the remote ui
    for sect in ('auth', 'hostfingerprints', 'hostsecurity', 'http_proxy'):
        for key, val in src.configitems(sect):
            dst.setconfig(sect, key, val, 'copied')
    v = src.config('web', 'cacerts')
    if v:
        dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')

    return dst
1147
1147
# Files of interest
# Used to check if the repository has changed looking at mtime and size of
# these files.  Each entry is (repo attribute holding the base path, filename).
foi = [('spath', '00changelog.i'),
       ('spath', 'phaseroots'), # ! phase can change content at the same size
       ('spath', 'obsstore'),
       ('path', 'bookmarks'), # ! bookmark can change content at the same size
      ]
1156
1156
class cachedlocalrepo(object):
    """Holds a localrepository that can be cached and reused."""

    def __init__(self, repo):
        """Create a new cached repo from an existing repo.

        We assume the passed in repo was recently created. If the
        repo has changed between when it was created and when it was
        turned into a cache, it may not refresh properly.
        """
        assert isinstance(repo, localrepo.localrepository)
        self._repo = repo
        # snapshot of (mtime, size) per file of interest, and newest mtime
        self._state, self.mtime = self._repostate()
        self._filtername = repo.filtername

    def fetch(self):
        """Refresh (if necessary) and return a repository.

        If the cached instance is out of date, it will be recreated
        automatically and returned.

        Returns a tuple of the repo and a boolean indicating whether a new
        repo instance was created.
        """
        # We compare the mtimes and sizes of some well-known files to
        # determine if the repo changed. This is not precise, as mtimes
        # are susceptible to clock skew and imprecise filesystems and
        # file content can change while maintaining the same size.

        state, mtime = self._repostate()
        if state == self._state:
            return self._repo, False

        repo = repository(self._repo.baseui, self._repo.url())
        if self._filtername:
            self._repo = repo.filtered(self._filtername)
        else:
            self._repo = repo.unfiltered()
        self._state = state
        self.mtime = mtime

        return self._repo, True

    def _repostate(self):
        """Return ((mtime, size) tuple per file of interest, max mtime).

        Missing files fall back to stat()ing their containing directory so
        a deletion still changes the state.
        """
        state = []
        maxmtime = -1
        for attr, fname in foi:
            prefix = getattr(self._repo, attr)
            p = os.path.join(prefix, fname)
            try:
                st = os.stat(p)
            except OSError:
                st = os.stat(prefix)
            state.append((st[stat.ST_MTIME], st.st_size))
            maxmtime = max(maxmtime, st[stat.ST_MTIME])

        return tuple(state), maxmtime

    def copy(self):
        """Obtain a copy of this class instance.

        A new localrepository instance is obtained. The new instance should be
        completely independent of the original.
        """
        repo = repository(self._repo.baseui, self._repo.origroot)
        if self._filtername:
            repo = repo.filtered(self._filtername)
        else:
            repo = repo.unfiltered()
        c = cachedlocalrepo(repo)
        # share the already-computed state so the copy does not look stale
        c._state = self._state
        c.mtime = self.mtime
        return c
@@ -1,453 +1,454
1 # verify.py - repository integrity checking for Mercurial
1 # verify.py - repository integrity checking for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11
11
12 from .i18n import _
12 from .i18n import _
13 from .node import (
13 from .node import (
14 nullid,
14 nullid,
15 short,
15 short,
16 )
16 )
17
17
18 from . import (
18 from . import (
19 error,
19 error,
20 pycompat,
20 pycompat,
21 revlog,
21 revlog,
22 util,
22 util,
23 )
23 )
24
24
def verify(repo):
    """Verify the integrity of *repo* under its lock; return 1 on errors."""
    with repo.lock():
        return verifier(repo).verify()
28
28
29 def _normpath(f):
29 def _normpath(f):
30 # under hg < 2.4, convert didn't sanitize paths properly, so a
30 # under hg < 2.4, convert didn't sanitize paths properly, so a
31 # converted repo may contain repeated slashes
31 # converted repo may contain repeated slashes
32 while '//' in f:
32 while '//' in f:
33 f = f.replace('//', '/')
33 f = f.replace('//', '/')
34 return f
34 return f
35
35
36 class verifier(object):
36 class verifier(object):
37 def __init__(self, repo):
37 def __init__(self, repo):
38 self.repo = repo.unfiltered()
38 self.repo = repo.unfiltered()
39 self.ui = repo.ui
39 self.ui = repo.ui
40 self.match = repo.narrowmatch()
40 self.match = repo.narrowmatch()
41 self.badrevs = set()
41 self.badrevs = set()
42 self.errors = 0
42 self.errors = 0
43 self.warnings = 0
43 self.warnings = 0
44 self.havecl = len(repo.changelog) > 0
44 self.havecl = len(repo.changelog) > 0
45 self.havemf = len(repo.manifestlog.getstorage(b'')) > 0
45 self.havemf = len(repo.manifestlog.getstorage(b'')) > 0
46 self.revlogv1 = repo.changelog.version != revlog.REVLOGV0
46 self.revlogv1 = repo.changelog.version != revlog.REVLOGV0
47 self.lrugetctx = util.lrucachefunc(repo.__getitem__)
47 self.lrugetctx = util.lrucachefunc(repo.__getitem__)
48 self.refersmf = False
48 self.refersmf = False
49 self.fncachewarned = False
49 self.fncachewarned = False
50 # developer config: verify.skipflags
50 # developer config: verify.skipflags
51 self.skipflags = repo.ui.configint('verify', 'skipflags')
51 self.skipflags = repo.ui.configint('verify', 'skipflags')
52 self.warnorphanstorefiles = True
52 self.warnorphanstorefiles = True
53
53
54 def warn(self, msg):
54 def warn(self, msg):
55 self.ui.warn(msg + "\n")
55 self.ui.warn(msg + "\n")
56 self.warnings += 1
56 self.warnings += 1
57
57
58 def err(self, linkrev, msg, filename=None):
58 def err(self, linkrev, msg, filename=None):
59 if linkrev is not None:
59 if linkrev is not None:
60 self.badrevs.add(linkrev)
60 self.badrevs.add(linkrev)
61 linkrev = "%d" % linkrev
61 linkrev = "%d" % linkrev
62 else:
62 else:
63 linkrev = '?'
63 linkrev = '?'
64 msg = "%s: %s" % (linkrev, msg)
64 msg = "%s: %s" % (linkrev, msg)
65 if filename:
65 if filename:
66 msg = "%s@%s" % (filename, msg)
66 msg = "%s@%s" % (filename, msg)
67 self.ui.warn(" " + msg + "\n")
67 self.ui.warn(" " + msg + "\n")
68 self.errors += 1
68 self.errors += 1
69
69
70 def exc(self, linkrev, msg, inst, filename=None):
70 def exc(self, linkrev, msg, inst, filename=None):
71 fmsg = pycompat.bytestr(inst)
71 fmsg = pycompat.bytestr(inst)
72 if not fmsg:
72 if not fmsg:
73 fmsg = pycompat.byterepr(inst)
73 fmsg = pycompat.byterepr(inst)
74 self.err(linkrev, "%s: %s" % (msg, fmsg), filename)
74 self.err(linkrev, "%s: %s" % (msg, fmsg), filename)
75
75
76 def checklog(self, obj, name, linkrev):
76 def checklog(self, obj, name, linkrev):
77 if not len(obj) and (self.havecl or self.havemf):
77 if not len(obj) and (self.havecl or self.havemf):
78 self.err(linkrev, _("empty or missing %s") % name)
78 self.err(linkrev, _("empty or missing %s") % name)
79 return
79 return
80
80
81 d = obj.checksize()
81 d = obj.checksize()
82 if d[0]:
82 if d[0]:
83 self.err(None, _("data length off by %d bytes") % d[0], name)
83 self.err(None, _("data length off by %d bytes") % d[0], name)
84 if d[1]:
84 if d[1]:
85 self.err(None, _("index contains %d extra bytes") % d[1], name)
85 self.err(None, _("index contains %d extra bytes") % d[1], name)
86
86
87 if obj.version != revlog.REVLOGV0:
87 if obj.version != revlog.REVLOGV0:
88 if not self.revlogv1:
88 if not self.revlogv1:
89 self.warn(_("warning: `%s' uses revlog format 1") % name)
89 self.warn(_("warning: `%s' uses revlog format 1") % name)
90 elif self.revlogv1:
90 elif self.revlogv1:
91 self.warn(_("warning: `%s' uses revlog format 0") % name)
91 self.warn(_("warning: `%s' uses revlog format 0") % name)
92
92
93 def checkentry(self, obj, i, node, seen, linkrevs, f):
93 def checkentry(self, obj, i, node, seen, linkrevs, f):
94 lr = obj.linkrev(obj.rev(node))
94 lr = obj.linkrev(obj.rev(node))
95 if lr < 0 or (self.havecl and lr not in linkrevs):
95 if lr < 0 or (self.havecl and lr not in linkrevs):
96 if lr < 0 or lr >= len(self.repo.changelog):
96 if lr < 0 or lr >= len(self.repo.changelog):
97 msg = _("rev %d points to nonexistent changeset %d")
97 msg = _("rev %d points to nonexistent changeset %d")
98 else:
98 else:
99 msg = _("rev %d points to unexpected changeset %d")
99 msg = _("rev %d points to unexpected changeset %d")
100 self.err(None, msg % (i, lr), f)
100 self.err(None, msg % (i, lr), f)
101 if linkrevs:
101 if linkrevs:
102 if f and len(linkrevs) > 1:
102 if f and len(linkrevs) > 1:
103 try:
103 try:
104 # attempt to filter down to real linkrevs
104 # attempt to filter down to real linkrevs
105 linkrevs = [l for l in linkrevs
105 linkrevs = [l for l in linkrevs
106 if self.lrugetctx(l)[f].filenode() == node]
106 if self.lrugetctx(l)[f].filenode() == node]
107 except Exception:
107 except Exception:
108 pass
108 pass
109 self.warn(_(" (expected %s)") % " ".join
109 self.warn(_(" (expected %s)") % " ".join
110 (map(pycompat.bytestr, linkrevs)))
110 (map(pycompat.bytestr, linkrevs)))
111 lr = None # can't be trusted
111 lr = None # can't be trusted
112
112
113 try:
113 try:
114 p1, p2 = obj.parents(node)
114 p1, p2 = obj.parents(node)
115 if p1 not in seen and p1 != nullid:
115 if p1 not in seen and p1 != nullid:
116 self.err(lr, _("unknown parent 1 %s of %s") %
116 self.err(lr, _("unknown parent 1 %s of %s") %
117 (short(p1), short(node)), f)
117 (short(p1), short(node)), f)
118 if p2 not in seen and p2 != nullid:
118 if p2 not in seen and p2 != nullid:
119 self.err(lr, _("unknown parent 2 %s of %s") %
119 self.err(lr, _("unknown parent 2 %s of %s") %
120 (short(p2), short(node)), f)
120 (short(p2), short(node)), f)
121 except Exception as inst:
121 except Exception as inst:
122 self.exc(lr, _("checking parents of %s") % short(node), inst, f)
122 self.exc(lr, _("checking parents of %s") % short(node), inst, f)
123
123
124 if node in seen:
124 if node in seen:
125 self.err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f)
125 self.err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f)
126 seen[node] = i
126 seen[node] = i
127 return lr
127 return lr
128
128
129 def verify(self):
129 def verify(self):
130 repo = self.repo
130 repo = self.repo
131
131
132 ui = repo.ui
132 ui = repo.ui
133
133
134 if not repo.url().startswith('file:'):
134 if not repo.url().startswith('file:'):
135 raise error.Abort(_("cannot verify bundle or remote repos"))
135 raise error.Abort(_("cannot verify bundle or remote repos"))
136
136
137 if os.path.exists(repo.sjoin("journal")):
137 if os.path.exists(repo.sjoin("journal")):
138 ui.warn(_("abandoned transaction found - run hg recover\n"))
138 ui.warn(_("abandoned transaction found - run hg recover\n"))
139
139
140 if ui.verbose or not self.revlogv1:
140 if ui.verbose or not self.revlogv1:
141 ui.status(_("repository uses revlog format %d\n") %
141 ui.status(_("repository uses revlog format %d\n") %
142 (self.revlogv1 and 1 or 0))
142 (self.revlogv1 and 1 or 0))
143
143
144 mflinkrevs, filelinkrevs = self._verifychangelog()
144 mflinkrevs, filelinkrevs = self._verifychangelog()
145
145
146 filenodes = self._verifymanifest(mflinkrevs)
146 filenodes = self._verifymanifest(mflinkrevs)
147 del mflinkrevs
147 del mflinkrevs
148
148
149 self._crosscheckfiles(filelinkrevs, filenodes)
149 self._crosscheckfiles(filelinkrevs, filenodes)
150
150
151 totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs)
151 totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs)
152
152
153 ui.status(_("checked %d changesets with %d changes to %d files\n") %
153 ui.status(_("checked %d changesets with %d changes to %d files\n") %
154 (len(repo.changelog), filerevisions, totalfiles))
154 (len(repo.changelog), filerevisions, totalfiles))
155 if self.warnings:
155 if self.warnings:
156 ui.warn(_("%d warnings encountered!\n") % self.warnings)
156 ui.warn(_("%d warnings encountered!\n") % self.warnings)
157 if self.fncachewarned:
157 if self.fncachewarned:
158 ui.warn(_('hint: run "hg debugrebuildfncache" to recover from '
158 ui.warn(_('hint: run "hg debugrebuildfncache" to recover from '
159 'corrupt fncache\n'))
159 'corrupt fncache\n'))
160 if self.errors:
160 if self.errors:
161 ui.warn(_("%d integrity errors encountered!\n") % self.errors)
161 ui.warn(_("%d integrity errors encountered!\n") % self.errors)
162 if self.badrevs:
162 if self.badrevs:
163 ui.warn(_("(first damaged changeset appears to be %d)\n")
163 ui.warn(_("(first damaged changeset appears to be %d)\n")
164 % min(self.badrevs))
164 % min(self.badrevs))
165 return 1
165 return 1
166
166
167 def _verifychangelog(self):
167 def _verifychangelog(self):
168 ui = self.ui
168 ui = self.ui
169 repo = self.repo
169 repo = self.repo
170 match = self.match
170 match = self.match
171 cl = repo.changelog
171 cl = repo.changelog
172
172
173 ui.status(_("checking changesets\n"))
173 ui.status(_("checking changesets\n"))
174 mflinkrevs = {}
174 mflinkrevs = {}
175 filelinkrevs = {}
175 filelinkrevs = {}
176 seen = {}
176 seen = {}
177 self.checklog(cl, "changelog", 0)
177 self.checklog(cl, "changelog", 0)
178 progress = ui.makeprogress(_('checking'), unit=_('changesets'),
178 progress = ui.makeprogress(_('checking'), unit=_('changesets'),
179 total=len(repo))
179 total=len(repo))
180 for i in repo:
180 for i in repo:
181 progress.update(i)
181 progress.update(i)
182 n = cl.node(i)
182 n = cl.node(i)
183 self.checkentry(cl, i, n, seen, [i], "changelog")
183 self.checkentry(cl, i, n, seen, [i], "changelog")
184
184
185 try:
185 try:
186 changes = cl.read(n)
186 changes = cl.read(n)
187 if changes[0] != nullid:
187 if changes[0] != nullid:
188 mflinkrevs.setdefault(changes[0], []).append(i)
188 mflinkrevs.setdefault(changes[0], []).append(i)
189 self.refersmf = True
189 self.refersmf = True
190 for f in changes[3]:
190 for f in changes[3]:
191 if match(f):
191 if match(f):
192 filelinkrevs.setdefault(_normpath(f), []).append(i)
192 filelinkrevs.setdefault(_normpath(f), []).append(i)
193 except Exception as inst:
193 except Exception as inst:
194 self.refersmf = True
194 self.refersmf = True
195 self.exc(i, _("unpacking changeset %s") % short(n), inst)
195 self.exc(i, _("unpacking changeset %s") % short(n), inst)
196 progress.complete()
196 progress.complete()
197 return mflinkrevs, filelinkrevs
197 return mflinkrevs, filelinkrevs
198
198
199 def _verifymanifest(self, mflinkrevs, dir="", storefiles=None,
199 def _verifymanifest(self, mflinkrevs, dir="", storefiles=None,
200 subdirprogress=None):
200 subdirprogress=None):
201 repo = self.repo
201 repo = self.repo
202 ui = self.ui
202 ui = self.ui
203 match = self.match
203 match = self.match
204 mfl = self.repo.manifestlog
204 mfl = self.repo.manifestlog
205 mf = mfl.getstorage(dir)
205 mf = mfl.getstorage(dir)
206
206
207 if not dir:
207 if not dir:
208 self.ui.status(_("checking manifests\n"))
208 self.ui.status(_("checking manifests\n"))
209
209
210 filenodes = {}
210 filenodes = {}
211 subdirnodes = {}
211 subdirnodes = {}
212 seen = {}
212 seen = {}
213 label = "manifest"
213 label = "manifest"
214 if dir:
214 if dir:
215 label = dir
215 label = dir
216 revlogfiles = mf.files()
216 revlogfiles = mf.files()
217 storefiles.difference_update(revlogfiles)
217 storefiles.difference_update(revlogfiles)
218 if subdirprogress: # should be true since we're in a subdirectory
218 if subdirprogress: # should be true since we're in a subdirectory
219 subdirprogress.increment()
219 subdirprogress.increment()
220 if self.refersmf:
220 if self.refersmf:
221 # Do not check manifest if there are only changelog entries with
221 # Do not check manifest if there are only changelog entries with
222 # null manifests.
222 # null manifests.
223 self.checklog(mf, label, 0)
223 self.checklog(mf, label, 0)
224 progress = ui.makeprogress(_('checking'), unit=_('manifests'),
224 progress = ui.makeprogress(_('checking'), unit=_('manifests'),
225 total=len(mf))
225 total=len(mf))
226 for i in mf:
226 for i in mf:
227 if not dir:
227 if not dir:
228 progress.update(i)
228 progress.update(i)
229 n = mf.node(i)
229 n = mf.node(i)
230 lr = self.checkentry(mf, i, n, seen, mflinkrevs.get(n, []), label)
230 lr = self.checkentry(mf, i, n, seen, mflinkrevs.get(n, []), label)
231 if n in mflinkrevs:
231 if n in mflinkrevs:
232 del mflinkrevs[n]
232 del mflinkrevs[n]
233 elif dir:
233 elif dir:
234 self.err(lr, _("%s not in parent-directory manifest") %
234 self.err(lr, _("%s not in parent-directory manifest") %
235 short(n), label)
235 short(n), label)
236 else:
236 else:
237 self.err(lr, _("%s not in changesets") % short(n), label)
237 self.err(lr, _("%s not in changesets") % short(n), label)
238
238
239 try:
239 try:
240 mfdelta = mfl.get(dir, n).readdelta(shallow=True)
240 mfdelta = mfl.get(dir, n).readdelta(shallow=True)
241 for f, fn, fl in mfdelta.iterentries():
241 for f, fn, fl in mfdelta.iterentries():
242 if not f:
242 if not f:
243 self.err(lr, _("entry without name in manifest"))
243 self.err(lr, _("entry without name in manifest"))
244 elif f == "/dev/null": # ignore this in very old repos
244 elif f == "/dev/null": # ignore this in very old repos
245 continue
245 continue
246 fullpath = dir + _normpath(f)
246 fullpath = dir + _normpath(f)
247 if fl == 't':
247 if fl == 't':
248 if not match.visitdir(fullpath):
248 if not match.visitdir(fullpath):
249 continue
249 continue
250 subdirnodes.setdefault(fullpath + '/', {}).setdefault(
250 subdirnodes.setdefault(fullpath + '/', {}).setdefault(
251 fn, []).append(lr)
251 fn, []).append(lr)
252 else:
252 else:
253 if not match(fullpath):
253 if not match(fullpath):
254 continue
254 continue
255 filenodes.setdefault(fullpath, {}).setdefault(fn, lr)
255 filenodes.setdefault(fullpath, {}).setdefault(fn, lr)
256 except Exception as inst:
256 except Exception as inst:
257 self.exc(lr, _("reading delta %s") % short(n), inst, label)
257 self.exc(lr, _("reading delta %s") % short(n), inst, label)
258 if not dir:
258 if not dir:
259 progress.complete()
259 progress.complete()
260
260
261 if self.havemf:
261 if self.havemf:
262 for c, m in sorted([(c, m) for m in mflinkrevs
262 for c, m in sorted([(c, m) for m in mflinkrevs
263 for c in mflinkrevs[m]]):
263 for c in mflinkrevs[m]]):
264 if dir:
264 if dir:
265 self.err(c, _("parent-directory manifest refers to unknown "
265 self.err(c, _("parent-directory manifest refers to unknown "
266 "revision %s") % short(m), label)
266 "revision %s") % short(m), label)
267 else:
267 else:
268 self.err(c, _("changeset refers to unknown revision %s") %
268 self.err(c, _("changeset refers to unknown revision %s") %
269 short(m), label)
269 short(m), label)
270
270
271 if not dir and subdirnodes:
271 if not dir and subdirnodes:
272 self.ui.status(_("checking directory manifests\n"))
272 self.ui.status(_("checking directory manifests\n"))
273 storefiles = set()
273 storefiles = set()
274 subdirs = set()
274 subdirs = set()
275 revlogv1 = self.revlogv1
275 revlogv1 = self.revlogv1
276 for f, f2, size in repo.store.datafiles():
276 for f, f2, size in repo.store.datafiles():
277 if not f:
277 if not f:
278 self.err(None, _("cannot decode filename '%s'") % f2)
278 self.err(None, _("cannot decode filename '%s'") % f2)
279 elif (size > 0 or not revlogv1) and f.startswith('meta/'):
279 elif (size > 0 or not revlogv1) and f.startswith('meta/'):
280 storefiles.add(_normpath(f))
280 storefiles.add(_normpath(f))
281 subdirs.add(os.path.dirname(f))
281 subdirs.add(os.path.dirname(f))
282 subdirprogress = ui.makeprogress(_('checking'), unit=_('manifests'),
282 subdirprogress = ui.makeprogress(_('checking'), unit=_('manifests'),
283 total=len(subdirs))
283 total=len(subdirs))
284
284
285 for subdir, linkrevs in subdirnodes.iteritems():
285 for subdir, linkrevs in subdirnodes.iteritems():
286 subdirfilenodes = self._verifymanifest(linkrevs, subdir, storefiles,
286 subdirfilenodes = self._verifymanifest(linkrevs, subdir, storefiles,
287 subdirprogress)
287 subdirprogress)
288 for f, onefilenodes in subdirfilenodes.iteritems():
288 for f, onefilenodes in subdirfilenodes.iteritems():
289 filenodes.setdefault(f, {}).update(onefilenodes)
289 filenodes.setdefault(f, {}).update(onefilenodes)
290
290
291 if not dir and subdirnodes:
291 if not dir and subdirnodes:
292 subdirprogress.complete()
292 subdirprogress.complete()
293 if self.warnorphanstorefiles:
293 if self.warnorphanstorefiles:
294 for f in sorted(storefiles):
294 for f in sorted(storefiles):
295 self.warn(_("warning: orphan data file '%s'") % f)
295 self.warn(_("warning: orphan data file '%s'") % f)
296
296
297 return filenodes
297 return filenodes
298
298
299 def _crosscheckfiles(self, filelinkrevs, filenodes):
299 def _crosscheckfiles(self, filelinkrevs, filenodes):
300 repo = self.repo
300 repo = self.repo
301 ui = self.ui
301 ui = self.ui
302 ui.status(_("crosschecking files in changesets and manifests\n"))
302 ui.status(_("crosschecking files in changesets and manifests\n"))
303
303
304 total = len(filelinkrevs) + len(filenodes)
304 total = len(filelinkrevs) + len(filenodes)
305 progress = ui.makeprogress(_('crosschecking'), total=total)
305 progress = ui.makeprogress(_('crosschecking'), unit=_('files'),
306 total=total)
306 if self.havemf:
307 if self.havemf:
307 for f in sorted(filelinkrevs):
308 for f in sorted(filelinkrevs):
308 progress.increment()
309 progress.increment()
309 if f not in filenodes:
310 if f not in filenodes:
310 lr = filelinkrevs[f][0]
311 lr = filelinkrevs[f][0]
311 self.err(lr, _("in changeset but not in manifest"), f)
312 self.err(lr, _("in changeset but not in manifest"), f)
312
313
313 if self.havecl:
314 if self.havecl:
314 for f in sorted(filenodes):
315 for f in sorted(filenodes):
315 progress.increment()
316 progress.increment()
316 if f not in filelinkrevs:
317 if f not in filelinkrevs:
317 try:
318 try:
318 fl = repo.file(f)
319 fl = repo.file(f)
319 lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
320 lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
320 except Exception:
321 except Exception:
321 lr = None
322 lr = None
322 self.err(lr, _("in manifest but not in changeset"), f)
323 self.err(lr, _("in manifest but not in changeset"), f)
323
324
324 progress.complete()
325 progress.complete()
325
326
326 def _verifyfiles(self, filenodes, filelinkrevs):
327 def _verifyfiles(self, filenodes, filelinkrevs):
327 repo = self.repo
328 repo = self.repo
328 ui = self.ui
329 ui = self.ui
329 lrugetctx = self.lrugetctx
330 lrugetctx = self.lrugetctx
330 revlogv1 = self.revlogv1
331 revlogv1 = self.revlogv1
331 havemf = self.havemf
332 havemf = self.havemf
332 ui.status(_("checking files\n"))
333 ui.status(_("checking files\n"))
333
334
334 storefiles = set()
335 storefiles = set()
335 for f, f2, size in repo.store.datafiles():
336 for f, f2, size in repo.store.datafiles():
336 if not f:
337 if not f:
337 self.err(None, _("cannot decode filename '%s'") % f2)
338 self.err(None, _("cannot decode filename '%s'") % f2)
338 elif (size > 0 or not revlogv1) and f.startswith('data/'):
339 elif (size > 0 or not revlogv1) and f.startswith('data/'):
339 storefiles.add(_normpath(f))
340 storefiles.add(_normpath(f))
340
341
341 state = {
342 state = {
342 # TODO this assumes revlog storage for changelog.
343 # TODO this assumes revlog storage for changelog.
343 'expectedversion': self.repo.changelog.version & 0xFFFF,
344 'expectedversion': self.repo.changelog.version & 0xFFFF,
344 'skipflags': self.skipflags,
345 'skipflags': self.skipflags,
345 # experimental config: censor.policy
346 # experimental config: censor.policy
346 'erroroncensored': ui.config('censor', 'policy') == 'abort',
347 'erroroncensored': ui.config('censor', 'policy') == 'abort',
347 }
348 }
348
349
349 files = sorted(set(filenodes) | set(filelinkrevs))
350 files = sorted(set(filenodes) | set(filelinkrevs))
350 revisions = 0
351 revisions = 0
351 progress = ui.makeprogress(_('checking'), unit=_('files'),
352 progress = ui.makeprogress(_('checking'), unit=_('files'),
352 total=len(files))
353 total=len(files))
353 for i, f in enumerate(files):
354 for i, f in enumerate(files):
354 progress.update(i, item=f)
355 progress.update(i, item=f)
355 try:
356 try:
356 linkrevs = filelinkrevs[f]
357 linkrevs = filelinkrevs[f]
357 except KeyError:
358 except KeyError:
358 # in manifest but not in changelog
359 # in manifest but not in changelog
359 linkrevs = []
360 linkrevs = []
360
361
361 if linkrevs:
362 if linkrevs:
362 lr = linkrevs[0]
363 lr = linkrevs[0]
363 else:
364 else:
364 lr = None
365 lr = None
365
366
366 try:
367 try:
367 fl = repo.file(f)
368 fl = repo.file(f)
368 except error.StorageError as e:
369 except error.StorageError as e:
369 self.err(lr, _("broken revlog! (%s)") % e, f)
370 self.err(lr, _("broken revlog! (%s)") % e, f)
370 continue
371 continue
371
372
372 for ff in fl.files():
373 for ff in fl.files():
373 try:
374 try:
374 storefiles.remove(ff)
375 storefiles.remove(ff)
375 except KeyError:
376 except KeyError:
376 if self.warnorphanstorefiles:
377 if self.warnorphanstorefiles:
377 self.warn(_(" warning: revlog '%s' not in fncache!") %
378 self.warn(_(" warning: revlog '%s' not in fncache!") %
378 ff)
379 ff)
379 self.fncachewarned = True
380 self.fncachewarned = True
380
381
381 if not len(fl) and (self.havecl or self.havemf):
382 if not len(fl) and (self.havecl or self.havemf):
382 self.err(lr, _("empty or missing %s") % f)
383 self.err(lr, _("empty or missing %s") % f)
383 else:
384 else:
384 # Guard against implementations not setting this.
385 # Guard against implementations not setting this.
385 state['skipread'] = set()
386 state['skipread'] = set()
386 for problem in fl.verifyintegrity(state):
387 for problem in fl.verifyintegrity(state):
387 if problem.node is not None:
388 if problem.node is not None:
388 linkrev = fl.linkrev(fl.rev(problem.node))
389 linkrev = fl.linkrev(fl.rev(problem.node))
389 else:
390 else:
390 linkrev = None
391 linkrev = None
391
392
392 if problem.warning:
393 if problem.warning:
393 self.warn(problem.warning)
394 self.warn(problem.warning)
394 elif problem.error:
395 elif problem.error:
395 self.err(linkrev if linkrev is not None else lr,
396 self.err(linkrev if linkrev is not None else lr,
396 problem.error, f)
397 problem.error, f)
397 else:
398 else:
398 raise error.ProgrammingError(
399 raise error.ProgrammingError(
399 'problem instance does not set warning or error '
400 'problem instance does not set warning or error '
400 'attribute: %s' % problem.msg)
401 'attribute: %s' % problem.msg)
401
402
402 seen = {}
403 seen = {}
403 for i in fl:
404 for i in fl:
404 revisions += 1
405 revisions += 1
405 n = fl.node(i)
406 n = fl.node(i)
406 lr = self.checkentry(fl, i, n, seen, linkrevs, f)
407 lr = self.checkentry(fl, i, n, seen, linkrevs, f)
407 if f in filenodes:
408 if f in filenodes:
408 if havemf and n not in filenodes[f]:
409 if havemf and n not in filenodes[f]:
409 self.err(lr, _("%s not in manifests") % (short(n)), f)
410 self.err(lr, _("%s not in manifests") % (short(n)), f)
410 else:
411 else:
411 del filenodes[f][n]
412 del filenodes[f][n]
412
413
413 if n in state['skipread']:
414 if n in state['skipread']:
414 continue
415 continue
415
416
416 # check renames
417 # check renames
417 try:
418 try:
418 # This requires resolving fulltext (at least on revlogs). We
419 # This requires resolving fulltext (at least on revlogs). We
419 # may want ``verifyintegrity()`` to pass a set of nodes with
420 # may want ``verifyintegrity()`` to pass a set of nodes with
420 # rename metadata as an optimization.
421 # rename metadata as an optimization.
421 rp = fl.renamed(n)
422 rp = fl.renamed(n)
422 if rp:
423 if rp:
423 if lr is not None and ui.verbose:
424 if lr is not None and ui.verbose:
424 ctx = lrugetctx(lr)
425 ctx = lrugetctx(lr)
425 if not any(rp[0] in pctx for pctx in ctx.parents()):
426 if not any(rp[0] in pctx for pctx in ctx.parents()):
426 self.warn(_("warning: copy source of '%s' not"
427 self.warn(_("warning: copy source of '%s' not"
427 " in parents of %s") % (f, ctx))
428 " in parents of %s") % (f, ctx))
428 fl2 = repo.file(rp[0])
429 fl2 = repo.file(rp[0])
429 if not len(fl2):
430 if not len(fl2):
430 self.err(lr, _("empty or missing copy source "
431 self.err(lr, _("empty or missing copy source "
431 "revlog %s:%s") % (rp[0], short(rp[1])), f)
432 "revlog %s:%s") % (rp[0], short(rp[1])), f)
432 elif rp[1] == nullid:
433 elif rp[1] == nullid:
433 ui.note(_("warning: %s@%s: copy source"
434 ui.note(_("warning: %s@%s: copy source"
434 " revision is nullid %s:%s\n")
435 " revision is nullid %s:%s\n")
435 % (f, lr, rp[0], short(rp[1])))
436 % (f, lr, rp[0], short(rp[1])))
436 else:
437 else:
437 fl2.rev(rp[1])
438 fl2.rev(rp[1])
438 except Exception as inst:
439 except Exception as inst:
439 self.exc(lr, _("checking rename of %s") % short(n), inst, f)
440 self.exc(lr, _("checking rename of %s") % short(n), inst, f)
440
441
441 # cross-check
442 # cross-check
442 if f in filenodes:
443 if f in filenodes:
443 fns = [(v, k) for k, v in filenodes[f].iteritems()]
444 fns = [(v, k) for k, v in filenodes[f].iteritems()]
444 for lr, node in sorted(fns):
445 for lr, node in sorted(fns):
445 self.err(lr, _("manifest refers to unknown revision %s") %
446 self.err(lr, _("manifest refers to unknown revision %s") %
446 short(node), f)
447 short(node), f)
447 progress.complete()
448 progress.complete()
448
449
449 if self.warnorphanstorefiles:
450 if self.warnorphanstorefiles:
450 for f in sorted(storefiles):
451 for f in sorted(storefiles):
451 self.warn(_("warning: orphan data file '%s'") % f)
452 self.warn(_("warning: orphan data file '%s'") % f)
452
453
453 return len(files), revisions
454 return len(files), revisions
@@ -1,1296 +1,1296
1 #testcases sshv1 sshv2
1 #testcases sshv1 sshv2
2
2
3 #if sshv2
3 #if sshv2
4 $ cat >> $HGRCPATH << EOF
4 $ cat >> $HGRCPATH << EOF
5 > [experimental]
5 > [experimental]
6 > sshpeer.advertise-v2 = true
6 > sshpeer.advertise-v2 = true
7 > sshserver.support-v2 = true
7 > sshserver.support-v2 = true
8 > EOF
8 > EOF
9 #endif
9 #endif
10
10
11 Prepare repo a:
11 Prepare repo a:
12
12
13 $ hg init a
13 $ hg init a
14 $ cd a
14 $ cd a
15 $ echo a > a
15 $ echo a > a
16 $ hg add a
16 $ hg add a
17 $ hg commit -m test
17 $ hg commit -m test
18 $ echo first line > b
18 $ echo first line > b
19 $ hg add b
19 $ hg add b
20
20
21 Create a non-inlined filelog:
21 Create a non-inlined filelog:
22
22
23 $ "$PYTHON" -c 'open("data1", "wb").write(b"".join(b"%d\n" % x for x in range(10000)))'
23 $ "$PYTHON" -c 'open("data1", "wb").write(b"".join(b"%d\n" % x for x in range(10000)))'
24 $ for j in 0 1 2 3 4 5 6 7 8 9; do
24 $ for j in 0 1 2 3 4 5 6 7 8 9; do
25 > cat data1 >> b
25 > cat data1 >> b
26 > hg commit -m test
26 > hg commit -m test
27 > done
27 > done
28
28
29 List files in store/data (should show a 'b.d'):
29 List files in store/data (should show a 'b.d'):
30
30
31 #if reporevlogstore
31 #if reporevlogstore
32 $ for i in .hg/store/data/*; do
32 $ for i in .hg/store/data/*; do
33 > echo $i
33 > echo $i
34 > done
34 > done
35 .hg/store/data/a.i
35 .hg/store/data/a.i
36 .hg/store/data/b.d
36 .hg/store/data/b.d
37 .hg/store/data/b.i
37 .hg/store/data/b.i
38 #endif
38 #endif
39
39
40 Trigger branchcache creation:
40 Trigger branchcache creation:
41
41
42 $ hg branches
42 $ hg branches
43 default 10:a7949464abda
43 default 10:a7949464abda
44 $ ls .hg/cache
44 $ ls .hg/cache
45 branch2-served
45 branch2-served
46 checkisexec (execbit !)
46 checkisexec (execbit !)
47 checklink (symlink !)
47 checklink (symlink !)
48 checklink-target (symlink !)
48 checklink-target (symlink !)
49 checknoexec (execbit !)
49 checknoexec (execbit !)
50 manifestfulltextcache (reporevlogstore !)
50 manifestfulltextcache (reporevlogstore !)
51 rbc-names-v1
51 rbc-names-v1
52 rbc-revs-v1
52 rbc-revs-v1
53
53
54 Default operation:
54 Default operation:
55
55
56 $ hg clone . ../b
56 $ hg clone . ../b
57 updating to branch default
57 updating to branch default
58 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
58 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
59 $ cd ../b
59 $ cd ../b
60
60
61 Ensure branchcache got copied over:
61 Ensure branchcache got copied over:
62
62
63 $ ls .hg/cache
63 $ ls .hg/cache
64 branch2-served
64 branch2-served
65 checkisexec (execbit !)
65 checkisexec (execbit !)
66 checklink (symlink !)
66 checklink (symlink !)
67 checklink-target (symlink !)
67 checklink-target (symlink !)
68 rbc-names-v1
68 rbc-names-v1
69 rbc-revs-v1
69 rbc-revs-v1
70
70
71 $ cat a
71 $ cat a
72 a
72 a
73 $ hg verify
73 $ hg verify
74 checking changesets
74 checking changesets
75 checking manifests
75 checking manifests
76 crosschecking files in changesets and manifests
76 crosschecking files in changesets and manifests
77 checking files
77 checking files
78 checked 11 changesets with 11 changes to 2 files
78 checked 11 changesets with 11 changes to 2 files
79
79
80 Invalid dest '' must abort:
80 Invalid dest '' must abort:
81
81
82 $ hg clone . ''
82 $ hg clone . ''
83 abort: empty destination path is not valid
83 abort: empty destination path is not valid
84 [255]
84 [255]
85
85
86 No update, with debug option:
86 No update, with debug option:
87
87
88 #if hardlink
88 #if hardlink
89 $ hg --debug clone -U . ../c --config progress.debug=true
89 $ hg --debug clone -U . ../c --config progress.debug=true
90 linking: 1
90 linking: 1 files
91 linking: 2
91 linking: 2 files
92 linking: 3
92 linking: 3 files
93 linking: 4
93 linking: 4 files
94 linking: 5
94 linking: 5 files
95 linking: 6
95 linking: 6 files
96 linking: 7
96 linking: 7 files
97 linking: 8
97 linking: 8 files
98 linked 8 files (reporevlogstore !)
98 linked 8 files (reporevlogstore !)
99 linking: 9 (reposimplestore !)
99 linking: 9 files (reposimplestore !)
100 linking: 10 (reposimplestore !)
100 linking: 10 files (reposimplestore !)
101 linking: 11 (reposimplestore !)
101 linking: 11 files (reposimplestore !)
102 linking: 12 (reposimplestore !)
102 linking: 12 files (reposimplestore !)
103 linking: 13 (reposimplestore !)
103 linking: 13 files (reposimplestore !)
104 linking: 14 (reposimplestore !)
104 linking: 14 files (reposimplestore !)
105 linking: 15 (reposimplestore !)
105 linking: 15 files (reposimplestore !)
106 linking: 16 (reposimplestore !)
106 linking: 16 files (reposimplestore !)
107 linking: 17 (reposimplestore !)
107 linking: 17 files (reposimplestore !)
108 linking: 18 (reposimplestore !)
108 linking: 18 files (reposimplestore !)
109 linked 18 files (reposimplestore !)
109 linked 18 files (reposimplestore !)
110 #else
110 #else
111 $ hg --debug clone -U . ../c --config progress.debug=true
111 $ hg --debug clone -U . ../c --config progress.debug=true
112 linking: 1
112 linking: 1 files
113 copying: 2
113 copying: 2 files
114 copying: 3
114 copying: 3 files
115 copying: 4
115 copying: 4 files
116 copying: 5
116 copying: 5 files
117 copying: 6
117 copying: 6 files
118 copying: 7
118 copying: 7 files
119 copying: 8
119 copying: 8 files
120 copied 8 files (reporevlogstore !)
120 copied 8 files (reporevlogstore !)
121 copying: 9 (reposimplestore !)
121 copying: 9 files (reposimplestore !)
122 copying: 10 (reposimplestore !)
122 copying: 10 files (reposimplestore !)
123 copying: 11 (reposimplestore !)
123 copying: 11 files (reposimplestore !)
124 copying: 12 (reposimplestore !)
124 copying: 12 files (reposimplestore !)
125 copying: 13 (reposimplestore !)
125 copying: 13 files (reposimplestore !)
126 copying: 14 (reposimplestore !)
126 copying: 14 files (reposimplestore !)
127 copying: 15 (reposimplestore !)
127 copying: 15 files (reposimplestore !)
128 copying: 16 (reposimplestore !)
128 copying: 16 files (reposimplestore !)
129 copying: 17 (reposimplestore !)
129 copying: 17 files (reposimplestore !)
130 copying: 18 (reposimplestore !)
130 copying: 18 files (reposimplestore !)
131 copied 18 files (reposimplestore !)
131 copied 18 files (reposimplestore !)
132 #endif
132 #endif
133 $ cd ../c
133 $ cd ../c
134
134
135 Ensure branchcache got copied over:
135 Ensure branchcache got copied over:
136
136
137 $ ls .hg/cache
137 $ ls .hg/cache
138 branch2-served
138 branch2-served
139 rbc-names-v1
139 rbc-names-v1
140 rbc-revs-v1
140 rbc-revs-v1
141
141
142 $ cat a 2>/dev/null || echo "a not present"
142 $ cat a 2>/dev/null || echo "a not present"
143 a not present
143 a not present
144 $ hg verify
144 $ hg verify
145 checking changesets
145 checking changesets
146 checking manifests
146 checking manifests
147 crosschecking files in changesets and manifests
147 crosschecking files in changesets and manifests
148 checking files
148 checking files
149 checked 11 changesets with 11 changes to 2 files
149 checked 11 changesets with 11 changes to 2 files
150
150
151 Default destination:
151 Default destination:
152
152
153 $ mkdir ../d
153 $ mkdir ../d
154 $ cd ../d
154 $ cd ../d
155 $ hg clone ../a
155 $ hg clone ../a
156 destination directory: a
156 destination directory: a
157 updating to branch default
157 updating to branch default
158 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
158 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
159 $ cd a
159 $ cd a
160 $ hg cat a
160 $ hg cat a
161 a
161 a
162 $ cd ../..
162 $ cd ../..
163
163
164 Check that we drop the 'file:' from the path before writing the .hgrc:
164 Check that we drop the 'file:' from the path before writing the .hgrc:
165
165
166 $ hg clone file:a e
166 $ hg clone file:a e
167 updating to branch default
167 updating to branch default
168 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
168 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
169 $ grep 'file:' e/.hg/hgrc
169 $ grep 'file:' e/.hg/hgrc
170 [1]
170 [1]
171
171
172 Check that path aliases are expanded:
172 Check that path aliases are expanded:
173
173
174 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
174 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
175 $ hg -R f showconfig paths.default
175 $ hg -R f showconfig paths.default
176 $TESTTMP/a#0
176 $TESTTMP/a#0
177
177
178 Use --pull:
178 Use --pull:
179
179
180 $ hg clone --pull a g
180 $ hg clone --pull a g
181 requesting all changes
181 requesting all changes
182 adding changesets
182 adding changesets
183 adding manifests
183 adding manifests
184 adding file changes
184 adding file changes
185 added 11 changesets with 11 changes to 2 files
185 added 11 changesets with 11 changes to 2 files
186 new changesets acb14030fe0a:a7949464abda
186 new changesets acb14030fe0a:a7949464abda
187 updating to branch default
187 updating to branch default
188 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
188 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
189 $ hg -R g verify
189 $ hg -R g verify
190 checking changesets
190 checking changesets
191 checking manifests
191 checking manifests
192 crosschecking files in changesets and manifests
192 crosschecking files in changesets and manifests
193 checking files
193 checking files
194 checked 11 changesets with 11 changes to 2 files
194 checked 11 changesets with 11 changes to 2 files
195
195
196 Invalid dest '' with --pull must abort (issue2528):
196 Invalid dest '' with --pull must abort (issue2528):
197
197
198 $ hg clone --pull a ''
198 $ hg clone --pull a ''
199 abort: empty destination path is not valid
199 abort: empty destination path is not valid
200 [255]
200 [255]
201
201
202 Clone to '.':
202 Clone to '.':
203
203
204 $ mkdir h
204 $ mkdir h
205 $ cd h
205 $ cd h
206 $ hg clone ../a .
206 $ hg clone ../a .
207 updating to branch default
207 updating to branch default
208 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
208 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
209 $ cd ..
209 $ cd ..
210
210
211
211
212 *** Tests for option -u ***
212 *** Tests for option -u ***
213
213
214 Adding some more history to repo a:
214 Adding some more history to repo a:
215
215
216 $ cd a
216 $ cd a
217 $ hg tag ref1
217 $ hg tag ref1
218 $ echo the quick brown fox >a
218 $ echo the quick brown fox >a
219 $ hg ci -m "hacked default"
219 $ hg ci -m "hacked default"
220 $ hg up ref1
220 $ hg up ref1
221 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
221 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
222 $ hg branch stable
222 $ hg branch stable
223 marked working directory as branch stable
223 marked working directory as branch stable
224 (branches are permanent and global, did you want a bookmark?)
224 (branches are permanent and global, did you want a bookmark?)
225 $ echo some text >a
225 $ echo some text >a
226 $ hg ci -m "starting branch stable"
226 $ hg ci -m "starting branch stable"
227 $ hg tag ref2
227 $ hg tag ref2
228 $ echo some more text >a
228 $ echo some more text >a
229 $ hg ci -m "another change for branch stable"
229 $ hg ci -m "another change for branch stable"
230 $ hg up ref2
230 $ hg up ref2
231 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
231 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
232 $ hg parents
232 $ hg parents
233 changeset: 13:e8ece76546a6
233 changeset: 13:e8ece76546a6
234 branch: stable
234 branch: stable
235 tag: ref2
235 tag: ref2
236 parent: 10:a7949464abda
236 parent: 10:a7949464abda
237 user: test
237 user: test
238 date: Thu Jan 01 00:00:00 1970 +0000
238 date: Thu Jan 01 00:00:00 1970 +0000
239 summary: starting branch stable
239 summary: starting branch stable
240
240
241
241
242 Repo a has two heads:
242 Repo a has two heads:
243
243
244 $ hg heads
244 $ hg heads
245 changeset: 15:0aae7cf88f0d
245 changeset: 15:0aae7cf88f0d
246 branch: stable
246 branch: stable
247 tag: tip
247 tag: tip
248 user: test
248 user: test
249 date: Thu Jan 01 00:00:00 1970 +0000
249 date: Thu Jan 01 00:00:00 1970 +0000
250 summary: another change for branch stable
250 summary: another change for branch stable
251
251
252 changeset: 12:f21241060d6a
252 changeset: 12:f21241060d6a
253 user: test
253 user: test
254 date: Thu Jan 01 00:00:00 1970 +0000
254 date: Thu Jan 01 00:00:00 1970 +0000
255 summary: hacked default
255 summary: hacked default
256
256
257
257
258 $ cd ..
258 $ cd ..
259
259
260
260
261 Testing --noupdate with --updaterev (must abort):
261 Testing --noupdate with --updaterev (must abort):
262
262
263 $ hg clone --noupdate --updaterev 1 a ua
263 $ hg clone --noupdate --updaterev 1 a ua
264 abort: cannot specify both --noupdate and --updaterev
264 abort: cannot specify both --noupdate and --updaterev
265 [255]
265 [255]
266
266
267
267
268 Testing clone -u:
268 Testing clone -u:
269
269
270 $ hg clone -u . a ua
270 $ hg clone -u . a ua
271 updating to branch stable
271 updating to branch stable
272 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
272 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
273
273
274 Repo ua has both heads:
274 Repo ua has both heads:
275
275
276 $ hg -R ua heads
276 $ hg -R ua heads
277 changeset: 15:0aae7cf88f0d
277 changeset: 15:0aae7cf88f0d
278 branch: stable
278 branch: stable
279 tag: tip
279 tag: tip
280 user: test
280 user: test
281 date: Thu Jan 01 00:00:00 1970 +0000
281 date: Thu Jan 01 00:00:00 1970 +0000
282 summary: another change for branch stable
282 summary: another change for branch stable
283
283
284 changeset: 12:f21241060d6a
284 changeset: 12:f21241060d6a
285 user: test
285 user: test
286 date: Thu Jan 01 00:00:00 1970 +0000
286 date: Thu Jan 01 00:00:00 1970 +0000
287 summary: hacked default
287 summary: hacked default
288
288
289
289
290 Same revision checked out in repo a and ua:
290 Same revision checked out in repo a and ua:
291
291
292 $ hg -R a parents --template "{node|short}\n"
292 $ hg -R a parents --template "{node|short}\n"
293 e8ece76546a6
293 e8ece76546a6
294 $ hg -R ua parents --template "{node|short}\n"
294 $ hg -R ua parents --template "{node|short}\n"
295 e8ece76546a6
295 e8ece76546a6
296
296
297 $ rm -r ua
297 $ rm -r ua
298
298
299
299
300 Testing clone --pull -u:
300 Testing clone --pull -u:
301
301
302 $ hg clone --pull -u . a ua
302 $ hg clone --pull -u . a ua
303 requesting all changes
303 requesting all changes
304 adding changesets
304 adding changesets
305 adding manifests
305 adding manifests
306 adding file changes
306 adding file changes
307 added 16 changesets with 16 changes to 3 files (+1 heads)
307 added 16 changesets with 16 changes to 3 files (+1 heads)
308 new changesets acb14030fe0a:0aae7cf88f0d
308 new changesets acb14030fe0a:0aae7cf88f0d
309 updating to branch stable
309 updating to branch stable
310 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
310 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
311
311
312 Repo ua has both heads:
312 Repo ua has both heads:
313
313
314 $ hg -R ua heads
314 $ hg -R ua heads
315 changeset: 15:0aae7cf88f0d
315 changeset: 15:0aae7cf88f0d
316 branch: stable
316 branch: stable
317 tag: tip
317 tag: tip
318 user: test
318 user: test
319 date: Thu Jan 01 00:00:00 1970 +0000
319 date: Thu Jan 01 00:00:00 1970 +0000
320 summary: another change for branch stable
320 summary: another change for branch stable
321
321
322 changeset: 12:f21241060d6a
322 changeset: 12:f21241060d6a
323 user: test
323 user: test
324 date: Thu Jan 01 00:00:00 1970 +0000
324 date: Thu Jan 01 00:00:00 1970 +0000
325 summary: hacked default
325 summary: hacked default
326
326
327
327
328 Same revision checked out in repo a and ua:
328 Same revision checked out in repo a and ua:
329
329
330 $ hg -R a parents --template "{node|short}\n"
330 $ hg -R a parents --template "{node|short}\n"
331 e8ece76546a6
331 e8ece76546a6
332 $ hg -R ua parents --template "{node|short}\n"
332 $ hg -R ua parents --template "{node|short}\n"
333 e8ece76546a6
333 e8ece76546a6
334
334
335 $ rm -r ua
335 $ rm -r ua
336
336
337
337
338 Testing clone -u <branch>:
338 Testing clone -u <branch>:
339
339
340 $ hg clone -u stable a ua
340 $ hg clone -u stable a ua
341 updating to branch stable
341 updating to branch stable
342 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
342 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
343
343
344 Repo ua has both heads:
344 Repo ua has both heads:
345
345
346 $ hg -R ua heads
346 $ hg -R ua heads
347 changeset: 15:0aae7cf88f0d
347 changeset: 15:0aae7cf88f0d
348 branch: stable
348 branch: stable
349 tag: tip
349 tag: tip
350 user: test
350 user: test
351 date: Thu Jan 01 00:00:00 1970 +0000
351 date: Thu Jan 01 00:00:00 1970 +0000
352 summary: another change for branch stable
352 summary: another change for branch stable
353
353
354 changeset: 12:f21241060d6a
354 changeset: 12:f21241060d6a
355 user: test
355 user: test
356 date: Thu Jan 01 00:00:00 1970 +0000
356 date: Thu Jan 01 00:00:00 1970 +0000
357 summary: hacked default
357 summary: hacked default
358
358
359
359
360 Branch 'stable' is checked out:
360 Branch 'stable' is checked out:
361
361
362 $ hg -R ua parents
362 $ hg -R ua parents
363 changeset: 15:0aae7cf88f0d
363 changeset: 15:0aae7cf88f0d
364 branch: stable
364 branch: stable
365 tag: tip
365 tag: tip
366 user: test
366 user: test
367 date: Thu Jan 01 00:00:00 1970 +0000
367 date: Thu Jan 01 00:00:00 1970 +0000
368 summary: another change for branch stable
368 summary: another change for branch stable
369
369
370
370
371 $ rm -r ua
371 $ rm -r ua
372
372
373
373
374 Testing default checkout:
374 Testing default checkout:
375
375
376 $ hg clone a ua
376 $ hg clone a ua
377 updating to branch default
377 updating to branch default
378 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
378 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
379
379
380 Repo ua has both heads:
380 Repo ua has both heads:
381
381
382 $ hg -R ua heads
382 $ hg -R ua heads
383 changeset: 15:0aae7cf88f0d
383 changeset: 15:0aae7cf88f0d
384 branch: stable
384 branch: stable
385 tag: tip
385 tag: tip
386 user: test
386 user: test
387 date: Thu Jan 01 00:00:00 1970 +0000
387 date: Thu Jan 01 00:00:00 1970 +0000
388 summary: another change for branch stable
388 summary: another change for branch stable
389
389
390 changeset: 12:f21241060d6a
390 changeset: 12:f21241060d6a
391 user: test
391 user: test
392 date: Thu Jan 01 00:00:00 1970 +0000
392 date: Thu Jan 01 00:00:00 1970 +0000
393 summary: hacked default
393 summary: hacked default
394
394
395
395
396 Branch 'default' is checked out:
396 Branch 'default' is checked out:
397
397
398 $ hg -R ua parents
398 $ hg -R ua parents
399 changeset: 12:f21241060d6a
399 changeset: 12:f21241060d6a
400 user: test
400 user: test
401 date: Thu Jan 01 00:00:00 1970 +0000
401 date: Thu Jan 01 00:00:00 1970 +0000
402 summary: hacked default
402 summary: hacked default
403
403
404 Test clone with a branch named "@" (issue3677)
404 Test clone with a branch named "@" (issue3677)
405
405
406 $ hg -R ua branch @
406 $ hg -R ua branch @
407 marked working directory as branch @
407 marked working directory as branch @
408 $ hg -R ua commit -m 'created branch @'
408 $ hg -R ua commit -m 'created branch @'
409 $ hg clone ua atbranch
409 $ hg clone ua atbranch
410 updating to branch default
410 updating to branch default
411 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
411 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
412 $ hg -R atbranch heads
412 $ hg -R atbranch heads
413 changeset: 16:798b6d97153e
413 changeset: 16:798b6d97153e
414 branch: @
414 branch: @
415 tag: tip
415 tag: tip
416 parent: 12:f21241060d6a
416 parent: 12:f21241060d6a
417 user: test
417 user: test
418 date: Thu Jan 01 00:00:00 1970 +0000
418 date: Thu Jan 01 00:00:00 1970 +0000
419 summary: created branch @
419 summary: created branch @
420
420
421 changeset: 15:0aae7cf88f0d
421 changeset: 15:0aae7cf88f0d
422 branch: stable
422 branch: stable
423 user: test
423 user: test
424 date: Thu Jan 01 00:00:00 1970 +0000
424 date: Thu Jan 01 00:00:00 1970 +0000
425 summary: another change for branch stable
425 summary: another change for branch stable
426
426
427 changeset: 12:f21241060d6a
427 changeset: 12:f21241060d6a
428 user: test
428 user: test
429 date: Thu Jan 01 00:00:00 1970 +0000
429 date: Thu Jan 01 00:00:00 1970 +0000
430 summary: hacked default
430 summary: hacked default
431
431
432 $ hg -R atbranch parents
432 $ hg -R atbranch parents
433 changeset: 12:f21241060d6a
433 changeset: 12:f21241060d6a
434 user: test
434 user: test
435 date: Thu Jan 01 00:00:00 1970 +0000
435 date: Thu Jan 01 00:00:00 1970 +0000
436 summary: hacked default
436 summary: hacked default
437
437
438
438
439 $ rm -r ua atbranch
439 $ rm -r ua atbranch
440
440
441
441
442 Testing #<branch>:
442 Testing #<branch>:
443
443
444 $ hg clone -u . a#stable ua
444 $ hg clone -u . a#stable ua
445 adding changesets
445 adding changesets
446 adding manifests
446 adding manifests
447 adding file changes
447 adding file changes
448 added 14 changesets with 14 changes to 3 files
448 added 14 changesets with 14 changes to 3 files
449 new changesets acb14030fe0a:0aae7cf88f0d
449 new changesets acb14030fe0a:0aae7cf88f0d
450 updating to branch stable
450 updating to branch stable
451 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
451 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
452
452
453 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
453 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
454
454
455 $ hg -R ua heads
455 $ hg -R ua heads
456 changeset: 13:0aae7cf88f0d
456 changeset: 13:0aae7cf88f0d
457 branch: stable
457 branch: stable
458 tag: tip
458 tag: tip
459 user: test
459 user: test
460 date: Thu Jan 01 00:00:00 1970 +0000
460 date: Thu Jan 01 00:00:00 1970 +0000
461 summary: another change for branch stable
461 summary: another change for branch stable
462
462
463 changeset: 10:a7949464abda
463 changeset: 10:a7949464abda
464 user: test
464 user: test
465 date: Thu Jan 01 00:00:00 1970 +0000
465 date: Thu Jan 01 00:00:00 1970 +0000
466 summary: test
466 summary: test
467
467
468
468
469 Same revision checked out in repo a and ua:
469 Same revision checked out in repo a and ua:
470
470
471 $ hg -R a parents --template "{node|short}\n"
471 $ hg -R a parents --template "{node|short}\n"
472 e8ece76546a6
472 e8ece76546a6
473 $ hg -R ua parents --template "{node|short}\n"
473 $ hg -R ua parents --template "{node|short}\n"
474 e8ece76546a6
474 e8ece76546a6
475
475
476 $ rm -r ua
476 $ rm -r ua
477
477
478
478
479 Testing -u -r <branch>:
479 Testing -u -r <branch>:
480
480
481 $ hg clone -u . -r stable a ua
481 $ hg clone -u . -r stable a ua
482 adding changesets
482 adding changesets
483 adding manifests
483 adding manifests
484 adding file changes
484 adding file changes
485 added 14 changesets with 14 changes to 3 files
485 added 14 changesets with 14 changes to 3 files
486 new changesets acb14030fe0a:0aae7cf88f0d
486 new changesets acb14030fe0a:0aae7cf88f0d
487 updating to branch stable
487 updating to branch stable
488 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
488 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
489
489
490 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
490 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
491
491
492 $ hg -R ua heads
492 $ hg -R ua heads
493 changeset: 13:0aae7cf88f0d
493 changeset: 13:0aae7cf88f0d
494 branch: stable
494 branch: stable
495 tag: tip
495 tag: tip
496 user: test
496 user: test
497 date: Thu Jan 01 00:00:00 1970 +0000
497 date: Thu Jan 01 00:00:00 1970 +0000
498 summary: another change for branch stable
498 summary: another change for branch stable
499
499
500 changeset: 10:a7949464abda
500 changeset: 10:a7949464abda
501 user: test
501 user: test
502 date: Thu Jan 01 00:00:00 1970 +0000
502 date: Thu Jan 01 00:00:00 1970 +0000
503 summary: test
503 summary: test
504
504
505
505
506 Same revision checked out in repo a and ua:
506 Same revision checked out in repo a and ua:
507
507
508 $ hg -R a parents --template "{node|short}\n"
508 $ hg -R a parents --template "{node|short}\n"
509 e8ece76546a6
509 e8ece76546a6
510 $ hg -R ua parents --template "{node|short}\n"
510 $ hg -R ua parents --template "{node|short}\n"
511 e8ece76546a6
511 e8ece76546a6
512
512
513 $ rm -r ua
513 $ rm -r ua
514
514
515
515
516 Testing -r <branch>:
516 Testing -r <branch>:
517
517
518 $ hg clone -r stable a ua
518 $ hg clone -r stable a ua
519 adding changesets
519 adding changesets
520 adding manifests
520 adding manifests
521 adding file changes
521 adding file changes
522 added 14 changesets with 14 changes to 3 files
522 added 14 changesets with 14 changes to 3 files
523 new changesets acb14030fe0a:0aae7cf88f0d
523 new changesets acb14030fe0a:0aae7cf88f0d
524 updating to branch stable
524 updating to branch stable
525 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
525 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
526
526
527 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
527 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
528
528
529 $ hg -R ua heads
529 $ hg -R ua heads
530 changeset: 13:0aae7cf88f0d
530 changeset: 13:0aae7cf88f0d
531 branch: stable
531 branch: stable
532 tag: tip
532 tag: tip
533 user: test
533 user: test
534 date: Thu Jan 01 00:00:00 1970 +0000
534 date: Thu Jan 01 00:00:00 1970 +0000
535 summary: another change for branch stable
535 summary: another change for branch stable
536
536
537 changeset: 10:a7949464abda
537 changeset: 10:a7949464abda
538 user: test
538 user: test
539 date: Thu Jan 01 00:00:00 1970 +0000
539 date: Thu Jan 01 00:00:00 1970 +0000
540 summary: test
540 summary: test
541
541
542
542
543 Branch 'stable' is checked out:
543 Branch 'stable' is checked out:
544
544
545 $ hg -R ua parents
545 $ hg -R ua parents
546 changeset: 13:0aae7cf88f0d
546 changeset: 13:0aae7cf88f0d
547 branch: stable
547 branch: stable
548 tag: tip
548 tag: tip
549 user: test
549 user: test
550 date: Thu Jan 01 00:00:00 1970 +0000
550 date: Thu Jan 01 00:00:00 1970 +0000
551 summary: another change for branch stable
551 summary: another change for branch stable
552
552
553
553
554 $ rm -r ua
554 $ rm -r ua
555
555
556
556
557 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
557 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
558 iterable in addbranchrevs()
558 iterable in addbranchrevs()
559
559
560 $ cat <<EOF > simpleclone.py
560 $ cat <<EOF > simpleclone.py
561 > from mercurial import hg, ui as uimod
561 > from mercurial import hg, ui as uimod
562 > myui = uimod.ui.load()
562 > myui = uimod.ui.load()
563 > repo = hg.repository(myui, b'a')
563 > repo = hg.repository(myui, b'a')
564 > hg.clone(myui, {}, repo, dest=b"ua")
564 > hg.clone(myui, {}, repo, dest=b"ua")
565 > EOF
565 > EOF
566
566
567 $ "$PYTHON" simpleclone.py
567 $ "$PYTHON" simpleclone.py
568 updating to branch default
568 updating to branch default
569 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
569 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
570
570
571 $ rm -r ua
571 $ rm -r ua
572
572
573 $ cat <<EOF > branchclone.py
573 $ cat <<EOF > branchclone.py
574 > from mercurial import extensions, hg, ui as uimod
574 > from mercurial import extensions, hg, ui as uimod
575 > myui = uimod.ui.load()
575 > myui = uimod.ui.load()
576 > extensions.loadall(myui)
576 > extensions.loadall(myui)
577 > repo = hg.repository(myui, b'a')
577 > repo = hg.repository(myui, b'a')
578 > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable",])
578 > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable",])
579 > EOF
579 > EOF
580
580
581 $ "$PYTHON" branchclone.py
581 $ "$PYTHON" branchclone.py
582 adding changesets
582 adding changesets
583 adding manifests
583 adding manifests
584 adding file changes
584 adding file changes
585 added 14 changesets with 14 changes to 3 files
585 added 14 changesets with 14 changes to 3 files
586 new changesets acb14030fe0a:0aae7cf88f0d
586 new changesets acb14030fe0a:0aae7cf88f0d
587 updating to branch stable
587 updating to branch stable
588 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
588 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
589 $ rm -r ua
589 $ rm -r ua
590
590
591
591
592 Test clone with special '@' bookmark:
592 Test clone with special '@' bookmark:
593 $ cd a
593 $ cd a
594 $ hg bookmark -r a7949464abda @ # branch point of stable from default
594 $ hg bookmark -r a7949464abda @ # branch point of stable from default
595 $ hg clone . ../i
595 $ hg clone . ../i
596 updating to bookmark @
596 updating to bookmark @
597 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
597 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
598 $ hg id -i ../i
598 $ hg id -i ../i
599 a7949464abda
599 a7949464abda
600 $ rm -r ../i
600 $ rm -r ../i
601
601
602 $ hg bookmark -f -r stable @
602 $ hg bookmark -f -r stable @
603 $ hg bookmarks
603 $ hg bookmarks
604 @ 15:0aae7cf88f0d
604 @ 15:0aae7cf88f0d
605 $ hg clone . ../i
605 $ hg clone . ../i
606 updating to bookmark @ on branch stable
606 updating to bookmark @ on branch stable
607 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
607 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
608 $ hg id -i ../i
608 $ hg id -i ../i
609 0aae7cf88f0d
609 0aae7cf88f0d
610 $ cd "$TESTTMP"
610 $ cd "$TESTTMP"
611
611
612
612
613 Testing failures:
613 Testing failures:
614
614
615 $ mkdir fail
615 $ mkdir fail
616 $ cd fail
616 $ cd fail
617
617
618 No local source
618 No local source
619
619
620 $ hg clone a b
620 $ hg clone a b
621 abort: repository a not found!
621 abort: repository a not found!
622 [255]
622 [255]
623
623
624 No remote source
624 No remote source
625
625
626 #if windows
626 #if windows
627 $ hg clone http://$LOCALIP:3121/a b
627 $ hg clone http://$LOCALIP:3121/a b
628 abort: error: * (glob)
628 abort: error: * (glob)
629 [255]
629 [255]
630 #else
630 #else
631 $ hg clone http://$LOCALIP:3121/a b
631 $ hg clone http://$LOCALIP:3121/a b
632 abort: error: *refused* (glob)
632 abort: error: *refused* (glob)
633 [255]
633 [255]
634 #endif
634 #endif
635 $ rm -rf b # work around bug with http clone
635 $ rm -rf b # work around bug with http clone
636
636
637
637
638 #if unix-permissions no-root
638 #if unix-permissions no-root
639
639
640 Inaccessible source
640 Inaccessible source
641
641
642 $ mkdir a
642 $ mkdir a
643 $ chmod 000 a
643 $ chmod 000 a
644 $ hg clone a b
644 $ hg clone a b
645 abort: Permission denied: *$TESTTMP/fail/a/.hg* (glob)
645 abort: Permission denied: *$TESTTMP/fail/a/.hg* (glob)
646 [255]
646 [255]
647
647
648 Inaccessible destination
648 Inaccessible destination
649
649
650 $ hg init b
650 $ hg init b
651 $ cd b
651 $ cd b
652 $ hg clone . ../a
652 $ hg clone . ../a
653 abort: Permission denied: *../a* (glob)
653 abort: Permission denied: *../a* (glob)
654 [255]
654 [255]
655 $ cd ..
655 $ cd ..
656 $ chmod 700 a
656 $ chmod 700 a
657 $ rm -r a b
657 $ rm -r a b
658
658
659 #endif
659 #endif
660
660
661
661
662 #if fifo
662 #if fifo
663
663
664 Source of wrong type
664 Source of wrong type
665
665
666 $ mkfifo a
666 $ mkfifo a
667 $ hg clone a b
667 $ hg clone a b
668 abort: $ENOTDIR$: *$TESTTMP/fail/a/.hg* (glob)
668 abort: $ENOTDIR$: *$TESTTMP/fail/a/.hg* (glob)
669 [255]
669 [255]
670 $ rm a
670 $ rm a
671
671
672 #endif
672 #endif
673
673
674 Default destination, same directory
674 Default destination, same directory
675
675
676 $ hg init q
676 $ hg init q
677 $ hg clone q
677 $ hg clone q
678 destination directory: q
678 destination directory: q
679 abort: destination 'q' is not empty
679 abort: destination 'q' is not empty
680 [255]
680 [255]
681
681
682 destination directory not empty
682 destination directory not empty
683
683
684 $ mkdir a
684 $ mkdir a
685 $ echo stuff > a/a
685 $ echo stuff > a/a
686 $ hg clone q a
686 $ hg clone q a
687 abort: destination 'a' is not empty
687 abort: destination 'a' is not empty
688 [255]
688 [255]
689
689
690
690
691 #if unix-permissions no-root
691 #if unix-permissions no-root
692
692
693 leave existing directory in place after clone failure
693 leave existing directory in place after clone failure
694
694
695 $ hg init c
695 $ hg init c
696 $ cd c
696 $ cd c
697 $ echo c > c
697 $ echo c > c
698 $ hg commit -A -m test
698 $ hg commit -A -m test
699 adding c
699 adding c
700 $ chmod -rx .hg/store/data
700 $ chmod -rx .hg/store/data
701 $ cd ..
701 $ cd ..
702 $ mkdir d
702 $ mkdir d
703 $ hg clone c d 2> err
703 $ hg clone c d 2> err
704 [255]
704 [255]
705 $ test -d d
705 $ test -d d
706 $ test -d d/.hg
706 $ test -d d/.hg
707 [1]
707 [1]
708
708
709 re-enable perm to allow deletion
709 re-enable perm to allow deletion
710
710
711 $ chmod +rx c/.hg/store/data
711 $ chmod +rx c/.hg/store/data
712
712
713 #endif
713 #endif
714
714
715 $ cd ..
715 $ cd ..
716
716
717 Test clone from the repository in (emulated) revlog format 0 (issue4203):
717 Test clone from the repository in (emulated) revlog format 0 (issue4203):
718
718
719 $ mkdir issue4203
719 $ mkdir issue4203
720 $ mkdir -p src/.hg
720 $ mkdir -p src/.hg
721 $ echo foo > src/foo
721 $ echo foo > src/foo
722 $ hg -R src add src/foo
722 $ hg -R src add src/foo
723 $ hg -R src commit -m '#0'
723 $ hg -R src commit -m '#0'
724 $ hg -R src log -q
724 $ hg -R src log -q
725 0:e1bab28bca43
725 0:e1bab28bca43
726 $ hg clone -U -q src dst
726 $ hg clone -U -q src dst
727 $ hg -R dst log -q
727 $ hg -R dst log -q
728 0:e1bab28bca43
728 0:e1bab28bca43
729
729
730 Create repositories to test auto sharing functionality
730 Create repositories to test auto sharing functionality
731
731
732 $ cat >> $HGRCPATH << EOF
732 $ cat >> $HGRCPATH << EOF
733 > [extensions]
733 > [extensions]
734 > share=
734 > share=
735 > EOF
735 > EOF
736
736
737 $ hg init empty
737 $ hg init empty
738 $ hg init source1a
738 $ hg init source1a
739 $ cd source1a
739 $ cd source1a
740 $ echo initial1 > foo
740 $ echo initial1 > foo
741 $ hg -q commit -A -m initial
741 $ hg -q commit -A -m initial
742 $ echo second > foo
742 $ echo second > foo
743 $ hg commit -m second
743 $ hg commit -m second
744 $ cd ..
744 $ cd ..
745
745
746 $ hg init filteredrev0
746 $ hg init filteredrev0
747 $ cd filteredrev0
747 $ cd filteredrev0
748 $ cat >> .hg/hgrc << EOF
748 $ cat >> .hg/hgrc << EOF
749 > [experimental]
749 > [experimental]
750 > evolution.createmarkers=True
750 > evolution.createmarkers=True
751 > EOF
751 > EOF
752 $ echo initial1 > foo
752 $ echo initial1 > foo
753 $ hg -q commit -A -m initial0
753 $ hg -q commit -A -m initial0
754 $ hg -q up -r null
754 $ hg -q up -r null
755 $ echo initial2 > foo
755 $ echo initial2 > foo
756 $ hg -q commit -A -m initial1
756 $ hg -q commit -A -m initial1
757 $ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8
757 $ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8
758 obsoleted 1 changesets
758 obsoleted 1 changesets
759 $ cd ..
759 $ cd ..
760
760
761 $ hg -q clone --pull source1a source1b
761 $ hg -q clone --pull source1a source1b
762 $ cd source1a
762 $ cd source1a
763 $ hg bookmark bookA
763 $ hg bookmark bookA
764 $ echo 1a > foo
764 $ echo 1a > foo
765 $ hg commit -m 1a
765 $ hg commit -m 1a
766 $ cd ../source1b
766 $ cd ../source1b
767 $ hg -q up -r 0
767 $ hg -q up -r 0
768 $ echo head1 > foo
768 $ echo head1 > foo
769 $ hg commit -m head1
769 $ hg commit -m head1
770 created new head
770 created new head
771 $ hg bookmark head1
771 $ hg bookmark head1
772 $ hg -q up -r 0
772 $ hg -q up -r 0
773 $ echo head2 > foo
773 $ echo head2 > foo
774 $ hg commit -m head2
774 $ hg commit -m head2
775 created new head
775 created new head
776 $ hg bookmark head2
776 $ hg bookmark head2
777 $ hg -q up -r 0
777 $ hg -q up -r 0
778 $ hg branch branch1
778 $ hg branch branch1
779 marked working directory as branch branch1
779 marked working directory as branch branch1
780 (branches are permanent and global, did you want a bookmark?)
780 (branches are permanent and global, did you want a bookmark?)
781 $ echo branch1 > foo
781 $ echo branch1 > foo
782 $ hg commit -m branch1
782 $ hg commit -m branch1
783 $ hg -q up -r 0
783 $ hg -q up -r 0
784 $ hg branch branch2
784 $ hg branch branch2
785 marked working directory as branch branch2
785 marked working directory as branch branch2
786 $ echo branch2 > foo
786 $ echo branch2 > foo
787 $ hg commit -m branch2
787 $ hg commit -m branch2
788 $ cd ..
788 $ cd ..
789 $ hg init source2
789 $ hg init source2
790 $ cd source2
790 $ cd source2
791 $ echo initial2 > foo
791 $ echo initial2 > foo
792 $ hg -q commit -A -m initial2
792 $ hg -q commit -A -m initial2
793 $ echo second > foo
793 $ echo second > foo
794 $ hg commit -m second
794 $ hg commit -m second
795 $ cd ..
795 $ cd ..
796
796
797 Clone with auto share from an empty repo should not result in share
797 Clone with auto share from an empty repo should not result in share
798
798
799 $ mkdir share
799 $ mkdir share
800 $ hg --config share.pool=share clone empty share-empty
800 $ hg --config share.pool=share clone empty share-empty
801 (not using pooled storage: remote appears to be empty)
801 (not using pooled storage: remote appears to be empty)
802 updating to branch default
802 updating to branch default
803 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
803 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
804 $ ls share
804 $ ls share
805 $ test -d share-empty/.hg/store
805 $ test -d share-empty/.hg/store
806 $ test -f share-empty/.hg/sharedpath
806 $ test -f share-empty/.hg/sharedpath
807 [1]
807 [1]
808
808
809 Clone with auto share from a repo with filtered revision 0 should not result in share
809 Clone with auto share from a repo with filtered revision 0 should not result in share
810
810
811 $ hg --config share.pool=share clone filteredrev0 share-filtered
811 $ hg --config share.pool=share clone filteredrev0 share-filtered
812 (not using pooled storage: unable to resolve identity of remote)
812 (not using pooled storage: unable to resolve identity of remote)
813 requesting all changes
813 requesting all changes
814 adding changesets
814 adding changesets
815 adding manifests
815 adding manifests
816 adding file changes
816 adding file changes
817 added 1 changesets with 1 changes to 1 files
817 added 1 changesets with 1 changes to 1 files
818 new changesets e082c1832e09
818 new changesets e082c1832e09
819 updating to branch default
819 updating to branch default
820 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
820 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
821
821
822 Clone from repo with content should result in shared store being created
822 Clone from repo with content should result in shared store being created
823
823
824 $ hg --config share.pool=share clone source1a share-dest1a
824 $ hg --config share.pool=share clone source1a share-dest1a
825 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
825 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
826 requesting all changes
826 requesting all changes
827 adding changesets
827 adding changesets
828 adding manifests
828 adding manifests
829 adding file changes
829 adding file changes
830 added 3 changesets with 3 changes to 1 files
830 added 3 changesets with 3 changes to 1 files
831 new changesets b5f04eac9d8f:e5bfe23c0b47
831 new changesets b5f04eac9d8f:e5bfe23c0b47
832 searching for changes
832 searching for changes
833 no changes found
833 no changes found
834 adding remote bookmark bookA
834 adding remote bookmark bookA
835 updating working directory
835 updating working directory
836 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
836 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
837
837
838 The shared repo should have been created
838 The shared repo should have been created
839
839
840 $ ls share
840 $ ls share
841 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
841 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
842
842
843 The destination should point to it
843 The destination should point to it
844
844
845 $ cat share-dest1a/.hg/sharedpath; echo
845 $ cat share-dest1a/.hg/sharedpath; echo
846 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
846 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
847
847
848 The destination should have bookmarks
848 The destination should have bookmarks
849
849
850 $ hg -R share-dest1a bookmarks
850 $ hg -R share-dest1a bookmarks
851 bookA 2:e5bfe23c0b47
851 bookA 2:e5bfe23c0b47
852
852
853 The default path should be the remote, not the share
853 The default path should be the remote, not the share
854
854
855 $ hg -R share-dest1a config paths.default
855 $ hg -R share-dest1a config paths.default
856 $TESTTMP/source1a
856 $TESTTMP/source1a
857
857
858 Clone with existing share dir should result in pull + share
858 Clone with existing share dir should result in pull + share
859
859
860 $ hg --config share.pool=share clone source1b share-dest1b
860 $ hg --config share.pool=share clone source1b share-dest1b
861 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
861 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
862 searching for changes
862 searching for changes
863 adding changesets
863 adding changesets
864 adding manifests
864 adding manifests
865 adding file changes
865 adding file changes
866 added 4 changesets with 4 changes to 1 files (+4 heads)
866 added 4 changesets with 4 changes to 1 files (+4 heads)
867 adding remote bookmark head1
867 adding remote bookmark head1
868 adding remote bookmark head2
868 adding remote bookmark head2
869 new changesets 4a8dc1ab4c13:6bacf4683960
869 new changesets 4a8dc1ab4c13:6bacf4683960
870 updating working directory
870 updating working directory
871 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
871 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
872
872
873 $ ls share
873 $ ls share
874 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
874 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
875
875
876 $ cat share-dest1b/.hg/sharedpath; echo
876 $ cat share-dest1b/.hg/sharedpath; echo
877 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
877 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
878
878
879 We only get bookmarks from the remote, not everything in the share
879 We only get bookmarks from the remote, not everything in the share
880
880
881 $ hg -R share-dest1b bookmarks
881 $ hg -R share-dest1b bookmarks
882 head1 3:4a8dc1ab4c13
882 head1 3:4a8dc1ab4c13
883 head2 4:99f71071f117
883 head2 4:99f71071f117
884
884
885 Default path should be source, not share.
885 Default path should be source, not share.
886
886
887 $ hg -R share-dest1b config paths.default
887 $ hg -R share-dest1b config paths.default
888 $TESTTMP/source1b
888 $TESTTMP/source1b
889
889
890 Checked out revision should be head of default branch
890 Checked out revision should be head of default branch
891
891
892 $ hg -R share-dest1b log -r .
892 $ hg -R share-dest1b log -r .
893 changeset: 4:99f71071f117
893 changeset: 4:99f71071f117
894 bookmark: head2
894 bookmark: head2
895 parent: 0:b5f04eac9d8f
895 parent: 0:b5f04eac9d8f
896 user: test
896 user: test
897 date: Thu Jan 01 00:00:00 1970 +0000
897 date: Thu Jan 01 00:00:00 1970 +0000
898 summary: head2
898 summary: head2
899
899
900
900
901 Clone from unrelated repo should result in new share
901 Clone from unrelated repo should result in new share
902
902
903 $ hg --config share.pool=share clone source2 share-dest2
903 $ hg --config share.pool=share clone source2 share-dest2
904 (sharing from new pooled repository 22aeff664783fd44c6d9b435618173c118c3448e)
904 (sharing from new pooled repository 22aeff664783fd44c6d9b435618173c118c3448e)
905 requesting all changes
905 requesting all changes
906 adding changesets
906 adding changesets
907 adding manifests
907 adding manifests
908 adding file changes
908 adding file changes
909 added 2 changesets with 2 changes to 1 files
909 added 2 changesets with 2 changes to 1 files
910 new changesets 22aeff664783:63cf6c3dba4a
910 new changesets 22aeff664783:63cf6c3dba4a
911 searching for changes
911 searching for changes
912 no changes found
912 no changes found
913 updating working directory
913 updating working directory
914 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
914 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
915
915
916 $ ls share
916 $ ls share
917 22aeff664783fd44c6d9b435618173c118c3448e
917 22aeff664783fd44c6d9b435618173c118c3448e
918 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
918 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
919
919
920 remote naming mode works as advertised
920 remote naming mode works as advertised
921
921
922 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1a share-remote1a
922 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1a share-remote1a
923 (sharing from new pooled repository 195bb1fcdb595c14a6c13e0269129ed78f6debde)
923 (sharing from new pooled repository 195bb1fcdb595c14a6c13e0269129ed78f6debde)
924 requesting all changes
924 requesting all changes
925 adding changesets
925 adding changesets
926 adding manifests
926 adding manifests
927 adding file changes
927 adding file changes
928 added 3 changesets with 3 changes to 1 files
928 added 3 changesets with 3 changes to 1 files
929 new changesets b5f04eac9d8f:e5bfe23c0b47
929 new changesets b5f04eac9d8f:e5bfe23c0b47
930 searching for changes
930 searching for changes
931 no changes found
931 no changes found
932 adding remote bookmark bookA
932 adding remote bookmark bookA
933 updating working directory
933 updating working directory
934 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
934 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
935
935
936 $ ls shareremote
936 $ ls shareremote
937 195bb1fcdb595c14a6c13e0269129ed78f6debde
937 195bb1fcdb595c14a6c13e0269129ed78f6debde
938
938
939 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1b share-remote1b
939 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1b share-remote1b
940 (sharing from new pooled repository c0d4f83847ca2a873741feb7048a45085fd47c46)
940 (sharing from new pooled repository c0d4f83847ca2a873741feb7048a45085fd47c46)
941 requesting all changes
941 requesting all changes
942 adding changesets
942 adding changesets
943 adding manifests
943 adding manifests
944 adding file changes
944 adding file changes
945 added 6 changesets with 6 changes to 1 files (+4 heads)
945 added 6 changesets with 6 changes to 1 files (+4 heads)
946 new changesets b5f04eac9d8f:6bacf4683960
946 new changesets b5f04eac9d8f:6bacf4683960
947 searching for changes
947 searching for changes
948 no changes found
948 no changes found
949 adding remote bookmark head1
949 adding remote bookmark head1
950 adding remote bookmark head2
950 adding remote bookmark head2
951 updating working directory
951 updating working directory
952 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
952 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
953
953
954 $ ls shareremote
954 $ ls shareremote
955 195bb1fcdb595c14a6c13e0269129ed78f6debde
955 195bb1fcdb595c14a6c13e0269129ed78f6debde
956 c0d4f83847ca2a873741feb7048a45085fd47c46
956 c0d4f83847ca2a873741feb7048a45085fd47c46
957
957
958 request to clone a single revision is respected in sharing mode
958 request to clone a single revision is respected in sharing mode
959
959
960 $ hg --config share.pool=sharerevs clone -r 4a8dc1ab4c13 source1b share-1arev
960 $ hg --config share.pool=sharerevs clone -r 4a8dc1ab4c13 source1b share-1arev
961 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
961 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
962 adding changesets
962 adding changesets
963 adding manifests
963 adding manifests
964 adding file changes
964 adding file changes
965 added 2 changesets with 2 changes to 1 files
965 added 2 changesets with 2 changes to 1 files
966 new changesets b5f04eac9d8f:4a8dc1ab4c13
966 new changesets b5f04eac9d8f:4a8dc1ab4c13
967 no changes found
967 no changes found
968 adding remote bookmark head1
968 adding remote bookmark head1
969 updating working directory
969 updating working directory
970 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
970 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
971
971
972 $ hg -R share-1arev log -G
972 $ hg -R share-1arev log -G
973 @ changeset: 1:4a8dc1ab4c13
973 @ changeset: 1:4a8dc1ab4c13
974 | bookmark: head1
974 | bookmark: head1
975 | tag: tip
975 | tag: tip
976 | user: test
976 | user: test
977 | date: Thu Jan 01 00:00:00 1970 +0000
977 | date: Thu Jan 01 00:00:00 1970 +0000
978 | summary: head1
978 | summary: head1
979 |
979 |
980 o changeset: 0:b5f04eac9d8f
980 o changeset: 0:b5f04eac9d8f
981 user: test
981 user: test
982 date: Thu Jan 01 00:00:00 1970 +0000
982 date: Thu Jan 01 00:00:00 1970 +0000
983 summary: initial
983 summary: initial
984
984
985
985
986 making another clone should only pull down requested rev
986 making another clone should only pull down requested rev
987
987
988 $ hg --config share.pool=sharerevs clone -r 99f71071f117 source1b share-1brev
988 $ hg --config share.pool=sharerevs clone -r 99f71071f117 source1b share-1brev
989 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
989 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
990 searching for changes
990 searching for changes
991 adding changesets
991 adding changesets
992 adding manifests
992 adding manifests
993 adding file changes
993 adding file changes
994 added 1 changesets with 1 changes to 1 files (+1 heads)
994 added 1 changesets with 1 changes to 1 files (+1 heads)
995 adding remote bookmark head1
995 adding remote bookmark head1
996 adding remote bookmark head2
996 adding remote bookmark head2
997 new changesets 99f71071f117
997 new changesets 99f71071f117
998 updating working directory
998 updating working directory
999 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
999 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1000
1000
1001 $ hg -R share-1brev log -G
1001 $ hg -R share-1brev log -G
1002 @ changeset: 2:99f71071f117
1002 @ changeset: 2:99f71071f117
1003 | bookmark: head2
1003 | bookmark: head2
1004 | tag: tip
1004 | tag: tip
1005 | parent: 0:b5f04eac9d8f
1005 | parent: 0:b5f04eac9d8f
1006 | user: test
1006 | user: test
1007 | date: Thu Jan 01 00:00:00 1970 +0000
1007 | date: Thu Jan 01 00:00:00 1970 +0000
1008 | summary: head2
1008 | summary: head2
1009 |
1009 |
1010 | o changeset: 1:4a8dc1ab4c13
1010 | o changeset: 1:4a8dc1ab4c13
1011 |/ bookmark: head1
1011 |/ bookmark: head1
1012 | user: test
1012 | user: test
1013 | date: Thu Jan 01 00:00:00 1970 +0000
1013 | date: Thu Jan 01 00:00:00 1970 +0000
1014 | summary: head1
1014 | summary: head1
1015 |
1015 |
1016 o changeset: 0:b5f04eac9d8f
1016 o changeset: 0:b5f04eac9d8f
1017 user: test
1017 user: test
1018 date: Thu Jan 01 00:00:00 1970 +0000
1018 date: Thu Jan 01 00:00:00 1970 +0000
1019 summary: initial
1019 summary: initial
1020
1020
1021
1021
1022 Request to clone a single branch is respected in sharing mode
1022 Request to clone a single branch is respected in sharing mode
1023
1023
1024 $ hg --config share.pool=sharebranch clone -b branch1 source1b share-1bbranch1
1024 $ hg --config share.pool=sharebranch clone -b branch1 source1b share-1bbranch1
1025 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1025 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1026 adding changesets
1026 adding changesets
1027 adding manifests
1027 adding manifests
1028 adding file changes
1028 adding file changes
1029 added 2 changesets with 2 changes to 1 files
1029 added 2 changesets with 2 changes to 1 files
1030 new changesets b5f04eac9d8f:5f92a6c1a1b1
1030 new changesets b5f04eac9d8f:5f92a6c1a1b1
1031 no changes found
1031 no changes found
1032 updating working directory
1032 updating working directory
1033 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1033 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1034
1034
1035 $ hg -R share-1bbranch1 log -G
1035 $ hg -R share-1bbranch1 log -G
1036 o changeset: 1:5f92a6c1a1b1
1036 o changeset: 1:5f92a6c1a1b1
1037 | branch: branch1
1037 | branch: branch1
1038 | tag: tip
1038 | tag: tip
1039 | user: test
1039 | user: test
1040 | date: Thu Jan 01 00:00:00 1970 +0000
1040 | date: Thu Jan 01 00:00:00 1970 +0000
1041 | summary: branch1
1041 | summary: branch1
1042 |
1042 |
1043 @ changeset: 0:b5f04eac9d8f
1043 @ changeset: 0:b5f04eac9d8f
1044 user: test
1044 user: test
1045 date: Thu Jan 01 00:00:00 1970 +0000
1045 date: Thu Jan 01 00:00:00 1970 +0000
1046 summary: initial
1046 summary: initial
1047
1047
1048
1048
1049 $ hg --config share.pool=sharebranch clone -b branch2 source1b share-1bbranch2
1049 $ hg --config share.pool=sharebranch clone -b branch2 source1b share-1bbranch2
1050 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1050 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1051 searching for changes
1051 searching for changes
1052 adding changesets
1052 adding changesets
1053 adding manifests
1053 adding manifests
1054 adding file changes
1054 adding file changes
1055 added 1 changesets with 1 changes to 1 files (+1 heads)
1055 added 1 changesets with 1 changes to 1 files (+1 heads)
1056 new changesets 6bacf4683960
1056 new changesets 6bacf4683960
1057 updating working directory
1057 updating working directory
1058 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1058 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1059
1059
1060 $ hg -R share-1bbranch2 log -G
1060 $ hg -R share-1bbranch2 log -G
1061 o changeset: 2:6bacf4683960
1061 o changeset: 2:6bacf4683960
1062 | branch: branch2
1062 | branch: branch2
1063 | tag: tip
1063 | tag: tip
1064 | parent: 0:b5f04eac9d8f
1064 | parent: 0:b5f04eac9d8f
1065 | user: test
1065 | user: test
1066 | date: Thu Jan 01 00:00:00 1970 +0000
1066 | date: Thu Jan 01 00:00:00 1970 +0000
1067 | summary: branch2
1067 | summary: branch2
1068 |
1068 |
1069 | o changeset: 1:5f92a6c1a1b1
1069 | o changeset: 1:5f92a6c1a1b1
1070 |/ branch: branch1
1070 |/ branch: branch1
1071 | user: test
1071 | user: test
1072 | date: Thu Jan 01 00:00:00 1970 +0000
1072 | date: Thu Jan 01 00:00:00 1970 +0000
1073 | summary: branch1
1073 | summary: branch1
1074 |
1074 |
1075 @ changeset: 0:b5f04eac9d8f
1075 @ changeset: 0:b5f04eac9d8f
1076 user: test
1076 user: test
1077 date: Thu Jan 01 00:00:00 1970 +0000
1077 date: Thu Jan 01 00:00:00 1970 +0000
1078 summary: initial
1078 summary: initial
1079
1079
1080
1080
1081 -U is respected in share clone mode
1081 -U is respected in share clone mode
1082
1082
1083 $ hg --config share.pool=share clone -U source1a share-1anowc
1083 $ hg --config share.pool=share clone -U source1a share-1anowc
1084 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1084 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1085 searching for changes
1085 searching for changes
1086 no changes found
1086 no changes found
1087 adding remote bookmark bookA
1087 adding remote bookmark bookA
1088
1088
1089 $ ls share-1anowc
1089 $ ls share-1anowc
1090
1090
1091 Test that auto sharing doesn't cause failure of "hg clone local remote"
1091 Test that auto sharing doesn't cause failure of "hg clone local remote"
1092
1092
1093 $ cd $TESTTMP
1093 $ cd $TESTTMP
1094 $ hg -R a id -r 0
1094 $ hg -R a id -r 0
1095 acb14030fe0a
1095 acb14030fe0a
1096 $ hg id -R remote -r 0
1096 $ hg id -R remote -r 0
1097 abort: repository remote not found!
1097 abort: repository remote not found!
1098 [255]
1098 [255]
1099 $ hg --config share.pool=share -q clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
1099 $ hg --config share.pool=share -q clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
1100 $ hg -R remote id -r 0
1100 $ hg -R remote id -r 0
1101 acb14030fe0a
1101 acb14030fe0a
1102
1102
1103 Cloning into pooled storage doesn't race (issue5104)
1103 Cloning into pooled storage doesn't race (issue5104)
1104
1104
1105 $ HGPOSTLOCKDELAY=2.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace1 > race1.log 2>&1 &
1105 $ HGPOSTLOCKDELAY=2.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace1 > race1.log 2>&1 &
1106 $ HGPRELOCKDELAY=1.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace2 > race2.log 2>&1
1106 $ HGPRELOCKDELAY=1.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace2 > race2.log 2>&1
1107 $ wait
1107 $ wait
1108
1108
1109 $ hg -R share-destrace1 log -r tip
1109 $ hg -R share-destrace1 log -r tip
1110 changeset: 2:e5bfe23c0b47
1110 changeset: 2:e5bfe23c0b47
1111 bookmark: bookA
1111 bookmark: bookA
1112 tag: tip
1112 tag: tip
1113 user: test
1113 user: test
1114 date: Thu Jan 01 00:00:00 1970 +0000
1114 date: Thu Jan 01 00:00:00 1970 +0000
1115 summary: 1a
1115 summary: 1a
1116
1116
1117
1117
1118 $ hg -R share-destrace2 log -r tip
1118 $ hg -R share-destrace2 log -r tip
1119 changeset: 2:e5bfe23c0b47
1119 changeset: 2:e5bfe23c0b47
1120 bookmark: bookA
1120 bookmark: bookA
1121 tag: tip
1121 tag: tip
1122 user: test
1122 user: test
1123 date: Thu Jan 01 00:00:00 1970 +0000
1123 date: Thu Jan 01 00:00:00 1970 +0000
1124 summary: 1a
1124 summary: 1a
1125
1125
1126 One repo should be new, the other should be shared from the pool. We
1126 One repo should be new, the other should be shared from the pool. We
1127 don't care which is which, so we just make sure we always print the
1127 don't care which is which, so we just make sure we always print the
1128 one containing "new pooled" first, then one one containing "existing
1128 one containing "new pooled" first, then one one containing "existing
1129 pooled".
1129 pooled".
1130
1130
1131 $ (grep 'new pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1131 $ (grep 'new pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1132 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1132 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1133 requesting all changes
1133 requesting all changes
1134 adding changesets
1134 adding changesets
1135 adding manifests
1135 adding manifests
1136 adding file changes
1136 adding file changes
1137 added 3 changesets with 3 changes to 1 files
1137 added 3 changesets with 3 changes to 1 files
1138 new changesets b5f04eac9d8f:e5bfe23c0b47
1138 new changesets b5f04eac9d8f:e5bfe23c0b47
1139 searching for changes
1139 searching for changes
1140 no changes found
1140 no changes found
1141 adding remote bookmark bookA
1141 adding remote bookmark bookA
1142 updating working directory
1142 updating working directory
1143 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1143 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1144
1144
1145 $ (grep 'existing pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1145 $ (grep 'existing pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1146 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1146 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1147 searching for changes
1147 searching for changes
1148 no changes found
1148 no changes found
1149 adding remote bookmark bookA
1149 adding remote bookmark bookA
1150 updating working directory
1150 updating working directory
1151 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1151 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1152
1152
1153 SEC: check for unsafe ssh url
1153 SEC: check for unsafe ssh url
1154
1154
1155 $ cat >> $HGRCPATH << EOF
1155 $ cat >> $HGRCPATH << EOF
1156 > [ui]
1156 > [ui]
1157 > ssh = sh -c "read l; read l; read l"
1157 > ssh = sh -c "read l; read l; read l"
1158 > EOF
1158 > EOF
1159
1159
1160 $ hg clone 'ssh://-oProxyCommand=touch${IFS}owned/path'
1160 $ hg clone 'ssh://-oProxyCommand=touch${IFS}owned/path'
1161 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1161 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1162 [255]
1162 [255]
1163 $ hg clone 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
1163 $ hg clone 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
1164 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1164 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1165 [255]
1165 [255]
1166 $ hg clone 'ssh://fakehost|touch%20owned/path'
1166 $ hg clone 'ssh://fakehost|touch%20owned/path'
1167 abort: no suitable response from remote hg!
1167 abort: no suitable response from remote hg!
1168 [255]
1168 [255]
1169 $ hg clone 'ssh://fakehost%7Ctouch%20owned/path'
1169 $ hg clone 'ssh://fakehost%7Ctouch%20owned/path'
1170 abort: no suitable response from remote hg!
1170 abort: no suitable response from remote hg!
1171 [255]
1171 [255]
1172
1172
1173 $ hg clone 'ssh://-oProxyCommand=touch owned%20foo@example.com/nonexistent/path'
1173 $ hg clone 'ssh://-oProxyCommand=touch owned%20foo@example.com/nonexistent/path'
1174 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch owned foo@example.com/nonexistent/path'
1174 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch owned foo@example.com/nonexistent/path'
1175 [255]
1175 [255]
1176
1176
1177 #if windows
1177 #if windows
1178 $ hg clone "ssh://%26touch%20owned%20/" --debug
1178 $ hg clone "ssh://%26touch%20owned%20/" --debug
1179 running sh -c "read l; read l; read l" "&touch owned " "hg -R . serve --stdio"
1179 running sh -c "read l; read l; read l" "&touch owned " "hg -R . serve --stdio"
1180 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1180 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1181 sending hello command
1181 sending hello command
1182 sending between command
1182 sending between command
1183 abort: no suitable response from remote hg!
1183 abort: no suitable response from remote hg!
1184 [255]
1184 [255]
1185 $ hg clone "ssh://example.com:%26touch%20owned%20/" --debug
1185 $ hg clone "ssh://example.com:%26touch%20owned%20/" --debug
1186 running sh -c "read l; read l; read l" -p "&touch owned " example.com "hg -R . serve --stdio"
1186 running sh -c "read l; read l; read l" -p "&touch owned " example.com "hg -R . serve --stdio"
1187 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1187 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1188 sending hello command
1188 sending hello command
1189 sending between command
1189 sending between command
1190 abort: no suitable response from remote hg!
1190 abort: no suitable response from remote hg!
1191 [255]
1191 [255]
1192 #else
1192 #else
1193 $ hg clone "ssh://%3btouch%20owned%20/" --debug
1193 $ hg clone "ssh://%3btouch%20owned%20/" --debug
1194 running sh -c "read l; read l; read l" ';touch owned ' 'hg -R . serve --stdio'
1194 running sh -c "read l; read l; read l" ';touch owned ' 'hg -R . serve --stdio'
1195 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1195 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1196 sending hello command
1196 sending hello command
1197 sending between command
1197 sending between command
1198 abort: no suitable response from remote hg!
1198 abort: no suitable response from remote hg!
1199 [255]
1199 [255]
1200 $ hg clone "ssh://example.com:%3btouch%20owned%20/" --debug
1200 $ hg clone "ssh://example.com:%3btouch%20owned%20/" --debug
1201 running sh -c "read l; read l; read l" -p ';touch owned ' example.com 'hg -R . serve --stdio'
1201 running sh -c "read l; read l; read l" -p ';touch owned ' example.com 'hg -R . serve --stdio'
1202 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1202 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1203 sending hello command
1203 sending hello command
1204 sending between command
1204 sending between command
1205 abort: no suitable response from remote hg!
1205 abort: no suitable response from remote hg!
1206 [255]
1206 [255]
1207 #endif
1207 #endif
1208
1208
1209 $ hg clone "ssh://v-alid.example.com/" --debug
1209 $ hg clone "ssh://v-alid.example.com/" --debug
1210 running sh -c "read l; read l; read l" v-alid\.example\.com ['"]hg -R \. serve --stdio['"] (re)
1210 running sh -c "read l; read l; read l" v-alid\.example\.com ['"]hg -R \. serve --stdio['"] (re)
1211 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1211 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
1212 sending hello command
1212 sending hello command
1213 sending between command
1213 sending between command
1214 abort: no suitable response from remote hg!
1214 abort: no suitable response from remote hg!
1215 [255]
1215 [255]
1216
1216
1217 We should not have created a file named owned - if it exists, the
1217 We should not have created a file named owned - if it exists, the
1218 attack succeeded.
1218 attack succeeded.
1219 $ if test -f owned; then echo 'you got owned'; fi
1219 $ if test -f owned; then echo 'you got owned'; fi
1220
1220
1221 Cloning without fsmonitor enabled does not print a warning for small repos
1221 Cloning without fsmonitor enabled does not print a warning for small repos
1222
1222
1223 $ hg clone a fsmonitor-default
1223 $ hg clone a fsmonitor-default
1224 updating to bookmark @ on branch stable
1224 updating to bookmark @ on branch stable
1225 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1225 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1226
1226
1227 Lower the warning threshold to simulate a large repo
1227 Lower the warning threshold to simulate a large repo
1228
1228
1229 $ cat >> $HGRCPATH << EOF
1229 $ cat >> $HGRCPATH << EOF
1230 > [fsmonitor]
1230 > [fsmonitor]
1231 > warn_update_file_count = 2
1231 > warn_update_file_count = 2
1232 > EOF
1232 > EOF
1233
1233
1234 We should see a warning about no fsmonitor on supported platforms
1234 We should see a warning about no fsmonitor on supported platforms
1235
1235
1236 #if linuxormacos no-fsmonitor
1236 #if linuxormacos no-fsmonitor
1237 $ hg clone a nofsmonitor
1237 $ hg clone a nofsmonitor
1238 updating to bookmark @ on branch stable
1238 updating to bookmark @ on branch stable
1239 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
1239 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
1240 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1240 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1241 #else
1241 #else
1242 $ hg clone a nofsmonitor
1242 $ hg clone a nofsmonitor
1243 updating to bookmark @ on branch stable
1243 updating to bookmark @ on branch stable
1244 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1244 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1245 #endif
1245 #endif
1246
1246
1247 We should not see warning about fsmonitor when it is enabled
1247 We should not see warning about fsmonitor when it is enabled
1248
1248
1249 #if fsmonitor
1249 #if fsmonitor
1250 $ hg clone a fsmonitor-enabled
1250 $ hg clone a fsmonitor-enabled
1251 updating to bookmark @ on branch stable
1251 updating to bookmark @ on branch stable
1252 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1252 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1253 #endif
1253 #endif
1254
1254
1255 We can disable the fsmonitor warning
1255 We can disable the fsmonitor warning
1256
1256
1257 $ hg --config fsmonitor.warn_when_unused=false clone a fsmonitor-disable-warning
1257 $ hg --config fsmonitor.warn_when_unused=false clone a fsmonitor-disable-warning
1258 updating to bookmark @ on branch stable
1258 updating to bookmark @ on branch stable
1259 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1259 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1260
1260
1261 Loaded fsmonitor but disabled in config should still print warning
1261 Loaded fsmonitor but disabled in config should still print warning
1262
1262
1263 #if linuxormacos fsmonitor
1263 #if linuxormacos fsmonitor
1264 $ hg --config fsmonitor.mode=off clone a fsmonitor-mode-off
1264 $ hg --config fsmonitor.mode=off clone a fsmonitor-mode-off
1265 updating to bookmark @ on branch stable
1265 updating to bookmark @ on branch stable
1266 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (fsmonitor !)
1266 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (fsmonitor !)
1267 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1267 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1268 #endif
1268 #endif
1269
1269
1270 Warning not printed if working directory isn't empty
1270 Warning not printed if working directory isn't empty
1271
1271
1272 $ hg -q clone a fsmonitor-update
1272 $ hg -q clone a fsmonitor-update
1273 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (?)
1273 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (?)
1274 $ cd fsmonitor-update
1274 $ cd fsmonitor-update
1275 $ hg up acb14030fe0a
1275 $ hg up acb14030fe0a
1276 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
1276 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
1277 (leaving bookmark @)
1277 (leaving bookmark @)
1278 $ hg up cf0fe1914066
1278 $ hg up cf0fe1914066
1279 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1279 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1280
1280
1281 `hg update` from null revision also prints
1281 `hg update` from null revision also prints
1282
1282
1283 $ hg up null
1283 $ hg up null
1284 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1284 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1285
1285
1286 #if linuxormacos no-fsmonitor
1286 #if linuxormacos no-fsmonitor
1287 $ hg up cf0fe1914066
1287 $ hg up cf0fe1914066
1288 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
1288 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
1289 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1289 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1290 #else
1290 #else
1291 $ hg up cf0fe1914066
1291 $ hg up cf0fe1914066
1292 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1292 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1293 #endif
1293 #endif
1294
1294
1295 $ cd ..
1295 $ cd ..
1296
1296
@@ -1,432 +1,432
1 #require hardlink reporevlogstore
1 #require hardlink reporevlogstore
2
2
3 $ cat > nlinks.py <<EOF
3 $ cat > nlinks.py <<EOF
4 > from __future__ import print_function
4 > from __future__ import print_function
5 > import sys
5 > import sys
6 > from mercurial import util
6 > from mercurial import util
7 > for f in sorted(sys.stdin.readlines()):
7 > for f in sorted(sys.stdin.readlines()):
8 > f = f[:-1]
8 > f = f[:-1]
9 > print(util.nlinks(f), f)
9 > print(util.nlinks(f), f)
10 > EOF
10 > EOF
11
11
12 $ nlinksdir()
12 $ nlinksdir()
13 > {
13 > {
14 > find "$@" -type f | "$PYTHON" $TESTTMP/nlinks.py
14 > find "$@" -type f | "$PYTHON" $TESTTMP/nlinks.py
15 > }
15 > }
16
16
17 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
17 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
18
18
19 $ cat > linkcp.py <<EOF
19 $ cat > linkcp.py <<EOF
20 > from __future__ import absolute_import
20 > from __future__ import absolute_import
21 > import sys
21 > import sys
22 > from mercurial import pycompat, util
22 > from mercurial import pycompat, util
23 > util.copyfiles(pycompat.fsencode(sys.argv[1]),
23 > util.copyfiles(pycompat.fsencode(sys.argv[1]),
24 > pycompat.fsencode(sys.argv[2]), hardlink=True)
24 > pycompat.fsencode(sys.argv[2]), hardlink=True)
25 > EOF
25 > EOF
26
26
27 $ linkcp()
27 $ linkcp()
28 > {
28 > {
29 > "$PYTHON" $TESTTMP/linkcp.py $1 $2
29 > "$PYTHON" $TESTTMP/linkcp.py $1 $2
30 > }
30 > }
31
31
32 Prepare repo r1:
32 Prepare repo r1:
33
33
34 $ hg init r1
34 $ hg init r1
35 $ cd r1
35 $ cd r1
36
36
37 $ echo c1 > f1
37 $ echo c1 > f1
38 $ hg add f1
38 $ hg add f1
39 $ hg ci -m0
39 $ hg ci -m0
40
40
41 $ mkdir d1
41 $ mkdir d1
42 $ cd d1
42 $ cd d1
43 $ echo c2 > f2
43 $ echo c2 > f2
44 $ hg add f2
44 $ hg add f2
45 $ hg ci -m1
45 $ hg ci -m1
46 $ cd ../..
46 $ cd ../..
47
47
48 $ nlinksdir r1/.hg/store
48 $ nlinksdir r1/.hg/store
49 1 r1/.hg/store/00changelog.i
49 1 r1/.hg/store/00changelog.i
50 1 r1/.hg/store/00manifest.i
50 1 r1/.hg/store/00manifest.i
51 1 r1/.hg/store/data/d1/f2.i
51 1 r1/.hg/store/data/d1/f2.i
52 1 r1/.hg/store/data/f1.i
52 1 r1/.hg/store/data/f1.i
53 1 r1/.hg/store/fncache (repofncache !)
53 1 r1/.hg/store/fncache (repofncache !)
54 1 r1/.hg/store/phaseroots
54 1 r1/.hg/store/phaseroots
55 1 r1/.hg/store/undo
55 1 r1/.hg/store/undo
56 1 r1/.hg/store/undo.backup.fncache (repofncache !)
56 1 r1/.hg/store/undo.backup.fncache (repofncache !)
57 1 r1/.hg/store/undo.backupfiles
57 1 r1/.hg/store/undo.backupfiles
58 1 r1/.hg/store/undo.phaseroots
58 1 r1/.hg/store/undo.phaseroots
59
59
60
60
61 Create hardlinked clone r2:
61 Create hardlinked clone r2:
62
62
63 $ hg clone -U --debug r1 r2 --config progress.debug=true
63 $ hg clone -U --debug r1 r2 --config progress.debug=true
64 linking: 1
64 linking: 1 files
65 linking: 2
65 linking: 2 files
66 linking: 3
66 linking: 3 files
67 linking: 4
67 linking: 4 files
68 linking: 5
68 linking: 5 files
69 linking: 6
69 linking: 6 files
70 linking: 7
70 linking: 7 files
71 linked 7 files
71 linked 7 files
72
72
73 Create non-hardlinked clone r3:
73 Create non-hardlinked clone r3:
74
74
75 $ hg clone --pull r1 r3
75 $ hg clone --pull r1 r3
76 requesting all changes
76 requesting all changes
77 adding changesets
77 adding changesets
78 adding manifests
78 adding manifests
79 adding file changes
79 adding file changes
80 added 2 changesets with 2 changes to 2 files
80 added 2 changesets with 2 changes to 2 files
81 new changesets 40d85e9847f2:7069c422939c
81 new changesets 40d85e9847f2:7069c422939c
82 updating to branch default
82 updating to branch default
83 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
83 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
84
84
85
85
86 Repos r1 and r2 should now contain hardlinked files:
86 Repos r1 and r2 should now contain hardlinked files:
87
87
88 $ nlinksdir r1/.hg/store
88 $ nlinksdir r1/.hg/store
89 2 r1/.hg/store/00changelog.i
89 2 r1/.hg/store/00changelog.i
90 2 r1/.hg/store/00manifest.i
90 2 r1/.hg/store/00manifest.i
91 2 r1/.hg/store/data/d1/f2.i
91 2 r1/.hg/store/data/d1/f2.i
92 2 r1/.hg/store/data/f1.i
92 2 r1/.hg/store/data/f1.i
93 2 r1/.hg/store/fncache (repofncache !)
93 2 r1/.hg/store/fncache (repofncache !)
94 1 r1/.hg/store/phaseroots
94 1 r1/.hg/store/phaseroots
95 1 r1/.hg/store/undo
95 1 r1/.hg/store/undo
96 1 r1/.hg/store/undo.backup.fncache (repofncache !)
96 1 r1/.hg/store/undo.backup.fncache (repofncache !)
97 1 r1/.hg/store/undo.backupfiles
97 1 r1/.hg/store/undo.backupfiles
98 1 r1/.hg/store/undo.phaseroots
98 1 r1/.hg/store/undo.phaseroots
99
99
100 $ nlinksdir r2/.hg/store
100 $ nlinksdir r2/.hg/store
101 2 r2/.hg/store/00changelog.i
101 2 r2/.hg/store/00changelog.i
102 2 r2/.hg/store/00manifest.i
102 2 r2/.hg/store/00manifest.i
103 2 r2/.hg/store/data/d1/f2.i
103 2 r2/.hg/store/data/d1/f2.i
104 2 r2/.hg/store/data/f1.i
104 2 r2/.hg/store/data/f1.i
105 2 r2/.hg/store/fncache (repofncache !)
105 2 r2/.hg/store/fncache (repofncache !)
106
106
107 Repo r3 should not be hardlinked:
107 Repo r3 should not be hardlinked:
108
108
109 $ nlinksdir r3/.hg/store
109 $ nlinksdir r3/.hg/store
110 1 r3/.hg/store/00changelog.i
110 1 r3/.hg/store/00changelog.i
111 1 r3/.hg/store/00manifest.i
111 1 r3/.hg/store/00manifest.i
112 1 r3/.hg/store/data/d1/f2.i
112 1 r3/.hg/store/data/d1/f2.i
113 1 r3/.hg/store/data/f1.i
113 1 r3/.hg/store/data/f1.i
114 1 r3/.hg/store/fncache (repofncache !)
114 1 r3/.hg/store/fncache (repofncache !)
115 1 r3/.hg/store/phaseroots
115 1 r3/.hg/store/phaseroots
116 1 r3/.hg/store/undo
116 1 r3/.hg/store/undo
117 1 r3/.hg/store/undo.backupfiles
117 1 r3/.hg/store/undo.backupfiles
118 1 r3/.hg/store/undo.phaseroots
118 1 r3/.hg/store/undo.phaseroots
119
119
120
120
121 Create a non-inlined filelog in r3:
121 Create a non-inlined filelog in r3:
122
122
123 $ cd r3/d1
123 $ cd r3/d1
124 >>> f = open('data1', 'wb')
124 >>> f = open('data1', 'wb')
125 >>> for x in range(10000):
125 >>> for x in range(10000):
126 ... f.write(b"%d\n" % x) and None
126 ... f.write(b"%d\n" % x) and None
127 >>> f.close()
127 >>> f.close()
128 $ for j in 0 1 2 3 4 5 6 7 8 9; do
128 $ for j in 0 1 2 3 4 5 6 7 8 9; do
129 > cat data1 >> f2
129 > cat data1 >> f2
130 > hg commit -m$j
130 > hg commit -m$j
131 > done
131 > done
132 $ cd ../..
132 $ cd ../..
133
133
134 $ nlinksdir r3/.hg/store
134 $ nlinksdir r3/.hg/store
135 1 r3/.hg/store/00changelog.i
135 1 r3/.hg/store/00changelog.i
136 1 r3/.hg/store/00manifest.i
136 1 r3/.hg/store/00manifest.i
137 1 r3/.hg/store/data/d1/f2.d
137 1 r3/.hg/store/data/d1/f2.d
138 1 r3/.hg/store/data/d1/f2.i
138 1 r3/.hg/store/data/d1/f2.i
139 1 r3/.hg/store/data/f1.i
139 1 r3/.hg/store/data/f1.i
140 1 r3/.hg/store/fncache (repofncache !)
140 1 r3/.hg/store/fncache (repofncache !)
141 1 r3/.hg/store/phaseroots
141 1 r3/.hg/store/phaseroots
142 1 r3/.hg/store/undo
142 1 r3/.hg/store/undo
143 1 r3/.hg/store/undo.backup.fncache (repofncache !)
143 1 r3/.hg/store/undo.backup.fncache (repofncache !)
144 1 r3/.hg/store/undo.backup.phaseroots
144 1 r3/.hg/store/undo.backup.phaseroots
145 1 r3/.hg/store/undo.backupfiles
145 1 r3/.hg/store/undo.backupfiles
146 1 r3/.hg/store/undo.phaseroots
146 1 r3/.hg/store/undo.phaseroots
147
147
148 Push to repo r1 should break up most hardlinks in r2:
148 Push to repo r1 should break up most hardlinks in r2:
149
149
150 $ hg -R r2 verify
150 $ hg -R r2 verify
151 checking changesets
151 checking changesets
152 checking manifests
152 checking manifests
153 crosschecking files in changesets and manifests
153 crosschecking files in changesets and manifests
154 checking files
154 checking files
155 checked 2 changesets with 2 changes to 2 files
155 checked 2 changesets with 2 changes to 2 files
156
156
157 $ cd r3
157 $ cd r3
158 $ hg push
158 $ hg push
159 pushing to $TESTTMP/r1
159 pushing to $TESTTMP/r1
160 searching for changes
160 searching for changes
161 adding changesets
161 adding changesets
162 adding manifests
162 adding manifests
163 adding file changes
163 adding file changes
164 added 10 changesets with 10 changes to 1 files
164 added 10 changesets with 10 changes to 1 files
165
165
166 $ cd ..
166 $ cd ..
167
167
168 $ nlinksdir r2/.hg/store
168 $ nlinksdir r2/.hg/store
169 1 r2/.hg/store/00changelog.i
169 1 r2/.hg/store/00changelog.i
170 1 r2/.hg/store/00manifest.i
170 1 r2/.hg/store/00manifest.i
171 1 r2/.hg/store/data/d1/f2.i
171 1 r2/.hg/store/data/d1/f2.i
172 2 r2/.hg/store/data/f1.i
172 2 r2/.hg/store/data/f1.i
173 [12] r2/\.hg/store/fncache (re) (repofncache !)
173 [12] r2/\.hg/store/fncache (re) (repofncache !)
174
174
175 #if hardlink-whitelisted repofncache
175 #if hardlink-whitelisted repofncache
176 $ nlinksdir r2/.hg/store/fncache
176 $ nlinksdir r2/.hg/store/fncache
177 2 r2/.hg/store/fncache
177 2 r2/.hg/store/fncache
178 #endif
178 #endif
179
179
180 $ hg -R r2 verify
180 $ hg -R r2 verify
181 checking changesets
181 checking changesets
182 checking manifests
182 checking manifests
183 crosschecking files in changesets and manifests
183 crosschecking files in changesets and manifests
184 checking files
184 checking files
185 checked 2 changesets with 2 changes to 2 files
185 checked 2 changesets with 2 changes to 2 files
186
186
187
187
188 $ cd r1
188 $ cd r1
189 $ hg up
189 $ hg up
190 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
190 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
191
191
192 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
192 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
193
193
194 $ echo c1c1 >> f1
194 $ echo c1c1 >> f1
195 $ hg ci -m00
195 $ hg ci -m00
196 $ cd ..
196 $ cd ..
197
197
198 $ nlinksdir r2/.hg/store
198 $ nlinksdir r2/.hg/store
199 1 r2/.hg/store/00changelog.i
199 1 r2/.hg/store/00changelog.i
200 1 r2/.hg/store/00manifest.i
200 1 r2/.hg/store/00manifest.i
201 1 r2/.hg/store/data/d1/f2.i
201 1 r2/.hg/store/data/d1/f2.i
202 1 r2/.hg/store/data/f1.i
202 1 r2/.hg/store/data/f1.i
203 [12] r2/\.hg/store/fncache (re) (repofncache !)
203 [12] r2/\.hg/store/fncache (re) (repofncache !)
204
204
205 #if hardlink-whitelisted repofncache
205 #if hardlink-whitelisted repofncache
206 $ nlinksdir r2/.hg/store/fncache
206 $ nlinksdir r2/.hg/store/fncache
207 2 r2/.hg/store/fncache
207 2 r2/.hg/store/fncache
208 #endif
208 #endif
209
209
210 Create a file which exec permissions we will change
210 Create a file which exec permissions we will change
211 $ cd r3
211 $ cd r3
212 $ echo "echo hello world" > f3
212 $ echo "echo hello world" > f3
213 $ hg add f3
213 $ hg add f3
214 $ hg ci -mf3
214 $ hg ci -mf3
215 $ cd ..
215 $ cd ..
216
216
217 $ cd r3
217 $ cd r3
218 $ hg tip --template '{rev}:{node|short}\n'
218 $ hg tip --template '{rev}:{node|short}\n'
219 12:d3b77733a28a
219 12:d3b77733a28a
220 $ echo bla > f1
220 $ echo bla > f1
221 $ chmod +x f3
221 $ chmod +x f3
222 $ hg ci -m1
222 $ hg ci -m1
223 $ cd ..
223 $ cd ..
224
224
225 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
225 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
226
226
227 $ linkcp r3 r4
227 $ linkcp r3 r4
228
228
229 'checklink' is produced by hardlinking a symlink, which is undefined whether
229 'checklink' is produced by hardlinking a symlink, which is undefined whether
230 the symlink should be followed or not. It does behave differently on Linux and
230 the symlink should be followed or not. It does behave differently on Linux and
231 BSD. Just remove it so the test pass on both platforms.
231 BSD. Just remove it so the test pass on both platforms.
232
232
233 $ rm -f r4/.hg/cache/checklink
233 $ rm -f r4/.hg/cache/checklink
234
234
235 r4 has hardlinks in the working dir (not just inside .hg):
235 r4 has hardlinks in the working dir (not just inside .hg):
236
236
237 $ nlinksdir r4
237 $ nlinksdir r4
238 2 r4/.hg/00changelog.i
238 2 r4/.hg/00changelog.i
239 2 r4/.hg/branch
239 2 r4/.hg/branch
240 2 r4/.hg/cache/branch2-base
240 2 r4/.hg/cache/branch2-base
241 2 r4/.hg/cache/branch2-served
241 2 r4/.hg/cache/branch2-served
242 2 r4/.hg/cache/checkisexec (execbit !)
242 2 r4/.hg/cache/checkisexec (execbit !)
243 ? r4/.hg/cache/checklink-target (glob) (symlink !)
243 ? r4/.hg/cache/checklink-target (glob) (symlink !)
244 2 r4/.hg/cache/checknoexec (execbit !)
244 2 r4/.hg/cache/checknoexec (execbit !)
245 2 r4/.hg/cache/manifestfulltextcache (reporevlogstore !)
245 2 r4/.hg/cache/manifestfulltextcache (reporevlogstore !)
246 2 r4/.hg/cache/rbc-names-v1
246 2 r4/.hg/cache/rbc-names-v1
247 2 r4/.hg/cache/rbc-revs-v1
247 2 r4/.hg/cache/rbc-revs-v1
248 2 r4/.hg/dirstate
248 2 r4/.hg/dirstate
249 2 r4/.hg/fsmonitor.state (fsmonitor !)
249 2 r4/.hg/fsmonitor.state (fsmonitor !)
250 2 r4/.hg/hgrc
250 2 r4/.hg/hgrc
251 2 r4/.hg/last-message.txt
251 2 r4/.hg/last-message.txt
252 2 r4/.hg/requires
252 2 r4/.hg/requires
253 2 r4/.hg/store/00changelog.i
253 2 r4/.hg/store/00changelog.i
254 2 r4/.hg/store/00manifest.i
254 2 r4/.hg/store/00manifest.i
255 2 r4/.hg/store/data/d1/f2.d
255 2 r4/.hg/store/data/d1/f2.d
256 2 r4/.hg/store/data/d1/f2.i
256 2 r4/.hg/store/data/d1/f2.i
257 2 r4/.hg/store/data/f1.i
257 2 r4/.hg/store/data/f1.i
258 2 r4/.hg/store/data/f3.i
258 2 r4/.hg/store/data/f3.i
259 2 r4/.hg/store/fncache (repofncache !)
259 2 r4/.hg/store/fncache (repofncache !)
260 2 r4/.hg/store/phaseroots
260 2 r4/.hg/store/phaseroots
261 2 r4/.hg/store/undo
261 2 r4/.hg/store/undo
262 2 r4/.hg/store/undo.backup.fncache (repofncache !)
262 2 r4/.hg/store/undo.backup.fncache (repofncache !)
263 2 r4/.hg/store/undo.backup.phaseroots
263 2 r4/.hg/store/undo.backup.phaseroots
264 2 r4/.hg/store/undo.backupfiles
264 2 r4/.hg/store/undo.backupfiles
265 2 r4/.hg/store/undo.phaseroots
265 2 r4/.hg/store/undo.phaseroots
266 [24] r4/\.hg/undo\.backup\.dirstate (re)
266 [24] r4/\.hg/undo\.backup\.dirstate (re)
267 2 r4/.hg/undo.bookmarks
267 2 r4/.hg/undo.bookmarks
268 2 r4/.hg/undo.branch
268 2 r4/.hg/undo.branch
269 2 r4/.hg/undo.desc
269 2 r4/.hg/undo.desc
270 [24] r4/\.hg/undo\.dirstate (re)
270 [24] r4/\.hg/undo\.dirstate (re)
271 2 r4/d1/data1
271 2 r4/d1/data1
272 2 r4/d1/f2
272 2 r4/d1/f2
273 2 r4/f1
273 2 r4/f1
274 2 r4/f3
274 2 r4/f3
275
275
276 Update back to revision 12 in r4 should break hardlink of file f1 and f3:
276 Update back to revision 12 in r4 should break hardlink of file f1 and f3:
277 #if hardlink-whitelisted
277 #if hardlink-whitelisted
278 $ nlinksdir r4/.hg/undo.backup.dirstate r4/.hg/undo.dirstate
278 $ nlinksdir r4/.hg/undo.backup.dirstate r4/.hg/undo.dirstate
279 4 r4/.hg/undo.backup.dirstate
279 4 r4/.hg/undo.backup.dirstate
280 4 r4/.hg/undo.dirstate
280 4 r4/.hg/undo.dirstate
281 #endif
281 #endif
282
282
283
283
284 $ hg -R r4 up 12
284 $ hg -R r4 up 12
285 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (execbit !)
285 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (execbit !)
286 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-execbit !)
286 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-execbit !)
287
287
288 $ nlinksdir r4
288 $ nlinksdir r4
289 2 r4/.hg/00changelog.i
289 2 r4/.hg/00changelog.i
290 1 r4/.hg/branch
290 1 r4/.hg/branch
291 2 r4/.hg/cache/branch2-base
291 2 r4/.hg/cache/branch2-base
292 2 r4/.hg/cache/branch2-served
292 2 r4/.hg/cache/branch2-served
293 2 r4/.hg/cache/checkisexec (execbit !)
293 2 r4/.hg/cache/checkisexec (execbit !)
294 2 r4/.hg/cache/checklink-target (symlink !)
294 2 r4/.hg/cache/checklink-target (symlink !)
295 2 r4/.hg/cache/checknoexec (execbit !)
295 2 r4/.hg/cache/checknoexec (execbit !)
296 2 r4/.hg/cache/manifestfulltextcache (reporevlogstore !)
296 2 r4/.hg/cache/manifestfulltextcache (reporevlogstore !)
297 2 r4/.hg/cache/rbc-names-v1
297 2 r4/.hg/cache/rbc-names-v1
298 2 r4/.hg/cache/rbc-revs-v1
298 2 r4/.hg/cache/rbc-revs-v1
299 1 r4/.hg/dirstate
299 1 r4/.hg/dirstate
300 1 r4/.hg/fsmonitor.state (fsmonitor !)
300 1 r4/.hg/fsmonitor.state (fsmonitor !)
301 2 r4/.hg/hgrc
301 2 r4/.hg/hgrc
302 2 r4/.hg/last-message.txt
302 2 r4/.hg/last-message.txt
303 2 r4/.hg/requires
303 2 r4/.hg/requires
304 2 r4/.hg/store/00changelog.i
304 2 r4/.hg/store/00changelog.i
305 2 r4/.hg/store/00manifest.i
305 2 r4/.hg/store/00manifest.i
306 2 r4/.hg/store/data/d1/f2.d
306 2 r4/.hg/store/data/d1/f2.d
307 2 r4/.hg/store/data/d1/f2.i
307 2 r4/.hg/store/data/d1/f2.i
308 2 r4/.hg/store/data/f1.i
308 2 r4/.hg/store/data/f1.i
309 2 r4/.hg/store/data/f3.i
309 2 r4/.hg/store/data/f3.i
310 2 r4/.hg/store/fncache
310 2 r4/.hg/store/fncache
311 2 r4/.hg/store/phaseroots
311 2 r4/.hg/store/phaseroots
312 2 r4/.hg/store/undo
312 2 r4/.hg/store/undo
313 2 r4/.hg/store/undo.backup.fncache (repofncache !)
313 2 r4/.hg/store/undo.backup.fncache (repofncache !)
314 2 r4/.hg/store/undo.backup.phaseroots
314 2 r4/.hg/store/undo.backup.phaseroots
315 2 r4/.hg/store/undo.backupfiles
315 2 r4/.hg/store/undo.backupfiles
316 2 r4/.hg/store/undo.phaseroots
316 2 r4/.hg/store/undo.phaseroots
317 [24] r4/\.hg/undo\.backup\.dirstate (re)
317 [24] r4/\.hg/undo\.backup\.dirstate (re)
318 2 r4/.hg/undo.bookmarks
318 2 r4/.hg/undo.bookmarks
319 2 r4/.hg/undo.branch
319 2 r4/.hg/undo.branch
320 2 r4/.hg/undo.desc
320 2 r4/.hg/undo.desc
321 [24] r4/\.hg/undo\.dirstate (re)
321 [24] r4/\.hg/undo\.dirstate (re)
322 2 r4/d1/data1
322 2 r4/d1/data1
323 2 r4/d1/f2
323 2 r4/d1/f2
324 1 r4/f1
324 1 r4/f1
325 1 r4/f3 (execbit !)
325 1 r4/f3 (execbit !)
326 2 r4/f3 (no-execbit !)
326 2 r4/f3 (no-execbit !)
327
327
328 #if hardlink-whitelisted
328 #if hardlink-whitelisted
329 $ nlinksdir r4/.hg/undo.backup.dirstate r4/.hg/undo.dirstate
329 $ nlinksdir r4/.hg/undo.backup.dirstate r4/.hg/undo.dirstate
330 4 r4/.hg/undo.backup.dirstate
330 4 r4/.hg/undo.backup.dirstate
331 4 r4/.hg/undo.dirstate
331 4 r4/.hg/undo.dirstate
332 #endif
332 #endif
333
333
334 Test hardlinking outside hg:
334 Test hardlinking outside hg:
335
335
336 $ mkdir x
336 $ mkdir x
337 $ echo foo > x/a
337 $ echo foo > x/a
338
338
339 $ linkcp x y
339 $ linkcp x y
340 $ echo bar >> y/a
340 $ echo bar >> y/a
341
341
342 No diff if hardlink:
342 No diff if hardlink:
343
343
344 $ diff x/a y/a
344 $ diff x/a y/a
345
345
346 Test mq hardlinking:
346 Test mq hardlinking:
347
347
348 $ echo "[extensions]" >> $HGRCPATH
348 $ echo "[extensions]" >> $HGRCPATH
349 $ echo "mq=" >> $HGRCPATH
349 $ echo "mq=" >> $HGRCPATH
350
350
351 $ hg init a
351 $ hg init a
352 $ cd a
352 $ cd a
353
353
354 $ hg qimport -n foo - << EOF
354 $ hg qimport -n foo - << EOF
355 > # HG changeset patch
355 > # HG changeset patch
356 > # Date 1 0
356 > # Date 1 0
357 > diff -r 2588a8b53d66 a
357 > diff -r 2588a8b53d66 a
358 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
358 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
359 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
359 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
360 > @@ -0,0 +1,1 @@
360 > @@ -0,0 +1,1 @@
361 > +a
361 > +a
362 > EOF
362 > EOF
363 adding foo to series file
363 adding foo to series file
364
364
365 $ hg qpush
365 $ hg qpush
366 applying foo
366 applying foo
367 now at: foo
367 now at: foo
368
368
369 $ cd ..
369 $ cd ..
370 $ linkcp a b
370 $ linkcp a b
371 $ cd b
371 $ cd b
372
372
373 $ hg qimport -n bar - << EOF
373 $ hg qimport -n bar - << EOF
374 > # HG changeset patch
374 > # HG changeset patch
375 > # Date 2 0
375 > # Date 2 0
376 > diff -r 2588a8b53d66 a
376 > diff -r 2588a8b53d66 a
377 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
377 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
378 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
378 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
379 > @@ -0,0 +1,1 @@
379 > @@ -0,0 +1,1 @@
380 > +b
380 > +b
381 > EOF
381 > EOF
382 adding bar to series file
382 adding bar to series file
383
383
384 $ hg qpush
384 $ hg qpush
385 applying bar
385 applying bar
386 now at: bar
386 now at: bar
387
387
388 $ cat .hg/patches/status
388 $ cat .hg/patches/status
389 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
389 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
390 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
390 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
391
391
392 $ cat .hg/patches/series
392 $ cat .hg/patches/series
393 foo
393 foo
394 bar
394 bar
395
395
396 $ cat ../a/.hg/patches/status
396 $ cat ../a/.hg/patches/status
397 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
397 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
398
398
399 $ cat ../a/.hg/patches/series
399 $ cat ../a/.hg/patches/series
400 foo
400 foo
401
401
402 Test tags hardlinking:
402 Test tags hardlinking:
403
403
404 $ hg qdel -r qbase:qtip
404 $ hg qdel -r qbase:qtip
405 patch foo finalized without changeset message
405 patch foo finalized without changeset message
406 patch bar finalized without changeset message
406 patch bar finalized without changeset message
407
407
408 $ hg tag -l lfoo
408 $ hg tag -l lfoo
409 $ hg tag foo
409 $ hg tag foo
410
410
411 $ cd ..
411 $ cd ..
412 $ linkcp b c
412 $ linkcp b c
413 $ cd c
413 $ cd c
414
414
415 $ hg tag -l -r 0 lbar
415 $ hg tag -l -r 0 lbar
416 $ hg tag -r 0 bar
416 $ hg tag -r 0 bar
417
417
418 $ cat .hgtags
418 $ cat .hgtags
419 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
419 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
420 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
420 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
421
421
422 $ cat .hg/localtags
422 $ cat .hg/localtags
423 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
423 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
424 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
424 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
425
425
426 $ cat ../b/.hgtags
426 $ cat ../b/.hgtags
427 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
427 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
428
428
429 $ cat ../b/.hg/localtags
429 $ cat ../b/.hg/localtags
430 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
430 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
431
431
432 $ cd ..
432 $ cd ..
@@ -1,1470 +1,1473
1 $ . $TESTDIR/wireprotohelpers.sh
1 $ . $TESTDIR/wireprotohelpers.sh
2
2
3 $ cat >> $HGRCPATH << EOF
3 $ cat >> $HGRCPATH << EOF
4 > [extensions]
4 > [extensions]
5 > blackbox =
5 > blackbox =
6 > [blackbox]
6 > [blackbox]
7 > track = simplecache
7 > track = simplecache
8 > EOF
8 > EOF
9
9
10 $ hg init server
10 $ hg init server
11 $ enablehttpv2 server
11 $ enablehttpv2 server
12 $ cd server
12 $ cd server
13 $ cat >> .hg/hgrc << EOF
13 $ cat >> .hg/hgrc << EOF
14 > [server]
14 > [server]
15 > compressionengines = zlib
15 > compressionengines = zlib
16 > [extensions]
16 > [extensions]
17 > simplecache = $TESTDIR/wireprotosimplecache.py
17 > simplecache = $TESTDIR/wireprotosimplecache.py
18 > [simplecache]
18 > [simplecache]
19 > cacheapi = true
19 > cacheapi = true
20 > EOF
20 > EOF
21
21
22 $ echo a0 > a
22 $ echo a0 > a
23 $ echo b0 > b
23 $ echo b0 > b
24 $ hg -q commit -A -m 'commit 0'
24 $ hg -q commit -A -m 'commit 0'
25 $ echo a1 > a
25 $ echo a1 > a
26 $ hg commit -m 'commit 1'
26 $ hg commit -m 'commit 1'
27
27
28 $ hg --debug debugindex -m
28 $ hg --debug debugindex -m
29 rev linkrev nodeid p1 p2
29 rev linkrev nodeid p1 p2
30 0 0 992f4779029a3df8d0666d00bb924f69634e2641 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
30 0 0 992f4779029a3df8d0666d00bb924f69634e2641 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
31 1 1 a988fb43583e871d1ed5750ee074c6d840bbbfc8 992f4779029a3df8d0666d00bb924f69634e2641 0000000000000000000000000000000000000000
31 1 1 a988fb43583e871d1ed5750ee074c6d840bbbfc8 992f4779029a3df8d0666d00bb924f69634e2641 0000000000000000000000000000000000000000
32
32
33 $ hg --config simplecache.redirectsfile=redirects.py serve -p $HGPORT -d --pid-file hg.pid -E error.log
33 $ hg --config simplecache.redirectsfile=redirects.py serve -p $HGPORT -d --pid-file hg.pid -E error.log
34 $ cat hg.pid > $DAEMON_PIDS
34 $ cat hg.pid > $DAEMON_PIDS
35
35
36 $ cat > redirects.py << EOF
36 $ cat > redirects.py << EOF
37 > [
37 > [
38 > {
38 > {
39 > b'name': b'target-a',
39 > b'name': b'target-a',
40 > b'protocol': b'http',
40 > b'protocol': b'http',
41 > b'snirequired': False,
41 > b'snirequired': False,
42 > b'tlsversions': [b'1.2', b'1.3'],
42 > b'tlsversions': [b'1.2', b'1.3'],
43 > b'uris': [b'http://example.com/'],
43 > b'uris': [b'http://example.com/'],
44 > },
44 > },
45 > ]
45 > ]
46 > EOF
46 > EOF
47
47
48 Redirect targets advertised when configured
48 Redirect targets advertised when configured
49
49
50 $ sendhttpv2peerhandshake << EOF
50 $ sendhttpv2peerhandshake << EOF
51 > command capabilities
51 > command capabilities
52 > EOF
52 > EOF
53 creating http peer for wire protocol version 2
53 creating http peer for wire protocol version 2
54 s> GET /?cmd=capabilities HTTP/1.1\r\n
54 s> GET /?cmd=capabilities HTTP/1.1\r\n
55 s> Accept-Encoding: identity\r\n
55 s> Accept-Encoding: identity\r\n
56 s> vary: X-HgProto-1,X-HgUpgrade-1\r\n
56 s> vary: X-HgProto-1,X-HgUpgrade-1\r\n
57 s> x-hgproto-1: cbor\r\n
57 s> x-hgproto-1: cbor\r\n
58 s> x-hgupgrade-1: exp-http-v2-0003\r\n
58 s> x-hgupgrade-1: exp-http-v2-0003\r\n
59 s> accept: application/mercurial-0.1\r\n
59 s> accept: application/mercurial-0.1\r\n
60 s> host: $LOCALIP:$HGPORT\r\n (glob)
60 s> host: $LOCALIP:$HGPORT\r\n (glob)
61 s> user-agent: Mercurial debugwireproto\r\n
61 s> user-agent: Mercurial debugwireproto\r\n
62 s> \r\n
62 s> \r\n
63 s> makefile('rb', None)
63 s> makefile('rb', None)
64 s> HTTP/1.1 200 OK\r\n
64 s> HTTP/1.1 200 OK\r\n
65 s> Server: testing stub value\r\n
65 s> Server: testing stub value\r\n
66 s> Date: $HTTP_DATE$\r\n
66 s> Date: $HTTP_DATE$\r\n
67 s> Content-Type: application/mercurial-cbor\r\n
67 s> Content-Type: application/mercurial-cbor\r\n
68 s> Content-Length: 2259\r\n
68 s> Content-Length: 2259\r\n
69 s> \r\n
69 s> \r\n
70 s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9
\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/Nv1capabilitiesY\x01\xd3batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
70 s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9
\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/Nv1capabilitiesY\x01\xd3batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
71 (remote redirect target target-a is compatible)
71 (remote redirect target target-a is compatible) (tls1.2 !)
72 (remote redirect target target-a requires unsupported TLS versions: 1.2, 1.3) (no-tls1.2 !)
72 sending capabilities command
73 sending capabilities command
73 s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
74 s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
74 s> Accept-Encoding: identity\r\n
75 s> Accept-Encoding: identity\r\n
75 s> accept: application/mercurial-exp-framing-0006\r\n
76 s> accept: application/mercurial-exp-framing-0006\r\n
76 s> content-type: application/mercurial-exp-framing-0006\r\n
77 s> content-type: application/mercurial-exp-framing-0006\r\n
77 s> content-length: 111\r\n
78 s> content-length: 111\r\n (tls1.2 !)
79 s> content-length: 102\r\n (no-tls1.2 !)
78 s> host: $LOCALIP:$HGPORT\r\n (glob)
80 s> host: $LOCALIP:$HGPORT\r\n (glob)
79 s> user-agent: Mercurial debugwireproto\r\n
81 s> user-agent: Mercurial debugwireproto\r\n
80 s> \r\n
82 s> \r\n
81 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81HidentityC\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81Htarget-a
83 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81HidentityC\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81Htarget-a (tls1.2 !)
84 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81Hidentity:\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x80 (no-tls1.2 !)
82 s> makefile('rb', None)
85 s> makefile('rb', None)
83 s> HTTP/1.1 200 OK\r\n
86 s> HTTP/1.1 200 OK\r\n
84 s> Server: testing stub value\r\n
87 s> Server: testing stub value\r\n
85 s> Date: $HTTP_DATE$\r\n
88 s> Date: $HTTP_DATE$\r\n
86 s> Content-Type: application/mercurial-exp-framing-0006\r\n
89 s> Content-Type: application/mercurial-exp-framing-0006\r\n
87 s> Transfer-Encoding: chunked\r\n
90 s> Transfer-Encoding: chunked\r\n
88 s> \r\n
91 s> \r\n
89 s> 11\r\n
92 s> 11\r\n
90 s> \t\x00\x00\x01\x00\x02\x01\x92
93 s> \t\x00\x00\x01\x00\x02\x01\x92
91 s> Hidentity
94 s> Hidentity
92 s> \r\n
95 s> \r\n
93 received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
96 received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
94 s> 13\r\n
97 s> 13\r\n
95 s> \x0b\x00\x00\x01\x00\x02\x041
98 s> \x0b\x00\x00\x01\x00\x02\x041
96 s> \xa1FstatusBok
99 s> \xa1FstatusBok
97 s> \r\n
100 s> \r\n
98 received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
101 received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
99 s> 6d1\r\n
102 s> 6d1\r\n
100 s> \xc9\x06\x00\x01\x00\x02\x041
103 s> \xc9\x06\x00\x01\x00\x02\x041
101 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepofor
mats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/
104 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepofor
mats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/
102 s> \r\n
105 s> \r\n
103 received frame(size=1737; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
106 received frame(size=1737; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
104 s> 8\r\n
107 s> 8\r\n
105 s> \x00\x00\x00\x01\x00\x02\x002
108 s> \x00\x00\x00\x01\x00\x02\x002
106 s> \r\n
109 s> \r\n
107 s> 0\r\n
110 s> 0\r\n
108 s> \r\n
111 s> \r\n
109 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
112 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
110 response: gen[
113 response: gen[
111 {
114 {
112 b'commands': {
115 b'commands': {
113 b'branchmap': {
116 b'branchmap': {
114 b'args': {},
117 b'args': {},
115 b'permissions': [
118 b'permissions': [
116 b'pull'
119 b'pull'
117 ]
120 ]
118 },
121 },
119 b'capabilities': {
122 b'capabilities': {
120 b'args': {},
123 b'args': {},
121 b'permissions': [
124 b'permissions': [
122 b'pull'
125 b'pull'
123 ]
126 ]
124 },
127 },
125 b'changesetdata': {
128 b'changesetdata': {
126 b'args': {
129 b'args': {
127 b'fields': {
130 b'fields': {
128 b'default': set([]),
131 b'default': set([]),
129 b'required': False,
132 b'required': False,
130 b'type': b'set',
133 b'type': b'set',
131 b'validvalues': set([
134 b'validvalues': set([
132 b'bookmarks',
135 b'bookmarks',
133 b'parents',
136 b'parents',
134 b'phase',
137 b'phase',
135 b'revision'
138 b'revision'
136 ])
139 ])
137 },
140 },
138 b'revisions': {
141 b'revisions': {
139 b'required': True,
142 b'required': True,
140 b'type': b'list'
143 b'type': b'list'
141 }
144 }
142 },
145 },
143 b'permissions': [
146 b'permissions': [
144 b'pull'
147 b'pull'
145 ]
148 ]
146 },
149 },
147 b'filedata': {
150 b'filedata': {
148 b'args': {
151 b'args': {
149 b'fields': {
152 b'fields': {
150 b'default': set([]),
153 b'default': set([]),
151 b'required': False,
154 b'required': False,
152 b'type': b'set',
155 b'type': b'set',
153 b'validvalues': set([
156 b'validvalues': set([
154 b'linknode',
157 b'linknode',
155 b'parents',
158 b'parents',
156 b'revision'
159 b'revision'
157 ])
160 ])
158 },
161 },
159 b'haveparents': {
162 b'haveparents': {
160 b'default': False,
163 b'default': False,
161 b'required': False,
164 b'required': False,
162 b'type': b'bool'
165 b'type': b'bool'
163 },
166 },
164 b'nodes': {
167 b'nodes': {
165 b'required': True,
168 b'required': True,
166 b'type': b'list'
169 b'type': b'list'
167 },
170 },
168 b'path': {
171 b'path': {
169 b'required': True,
172 b'required': True,
170 b'type': b'bytes'
173 b'type': b'bytes'
171 }
174 }
172 },
175 },
173 b'permissions': [
176 b'permissions': [
174 b'pull'
177 b'pull'
175 ]
178 ]
176 },
179 },
177 b'filesdata': {
180 b'filesdata': {
178 b'args': {
181 b'args': {
179 b'fields': {
182 b'fields': {
180 b'default': set([]),
183 b'default': set([]),
181 b'required': False,
184 b'required': False,
182 b'type': b'set',
185 b'type': b'set',
183 b'validvalues': set([
186 b'validvalues': set([
184 b'firstchangeset',
187 b'firstchangeset',
185 b'linknode',
188 b'linknode',
186 b'parents',
189 b'parents',
187 b'revision'
190 b'revision'
188 ])
191 ])
189 },
192 },
190 b'haveparents': {
193 b'haveparents': {
191 b'default': False,
194 b'default': False,
192 b'required': False,
195 b'required': False,
193 b'type': b'bool'
196 b'type': b'bool'
194 },
197 },
195 b'pathfilter': {
198 b'pathfilter': {
196 b'default': None,
199 b'default': None,
197 b'required': False,
200 b'required': False,
198 b'type': b'dict'
201 b'type': b'dict'
199 },
202 },
200 b'revisions': {
203 b'revisions': {
201 b'required': True,
204 b'required': True,
202 b'type': b'list'
205 b'type': b'list'
203 }
206 }
204 },
207 },
205 b'permissions': [
208 b'permissions': [
206 b'pull'
209 b'pull'
207 ],
210 ],
208 b'recommendedbatchsize': 50000
211 b'recommendedbatchsize': 50000
209 },
212 },
210 b'heads': {
213 b'heads': {
211 b'args': {
214 b'args': {
212 b'publiconly': {
215 b'publiconly': {
213 b'default': False,
216 b'default': False,
214 b'required': False,
217 b'required': False,
215 b'type': b'bool'
218 b'type': b'bool'
216 }
219 }
217 },
220 },
218 b'permissions': [
221 b'permissions': [
219 b'pull'
222 b'pull'
220 ]
223 ]
221 },
224 },
222 b'known': {
225 b'known': {
223 b'args': {
226 b'args': {
224 b'nodes': {
227 b'nodes': {
225 b'default': [],
228 b'default': [],
226 b'required': False,
229 b'required': False,
227 b'type': b'list'
230 b'type': b'list'
228 }
231 }
229 },
232 },
230 b'permissions': [
233 b'permissions': [
231 b'pull'
234 b'pull'
232 ]
235 ]
233 },
236 },
234 b'listkeys': {
237 b'listkeys': {
235 b'args': {
238 b'args': {
236 b'namespace': {
239 b'namespace': {
237 b'required': True,
240 b'required': True,
238 b'type': b'bytes'
241 b'type': b'bytes'
239 }
242 }
240 },
243 },
241 b'permissions': [
244 b'permissions': [
242 b'pull'
245 b'pull'
243 ]
246 ]
244 },
247 },
245 b'lookup': {
248 b'lookup': {
246 b'args': {
249 b'args': {
247 b'key': {
250 b'key': {
248 b'required': True,
251 b'required': True,
249 b'type': b'bytes'
252 b'type': b'bytes'
250 }
253 }
251 },
254 },
252 b'permissions': [
255 b'permissions': [
253 b'pull'
256 b'pull'
254 ]
257 ]
255 },
258 },
256 b'manifestdata': {
259 b'manifestdata': {
257 b'args': {
260 b'args': {
258 b'fields': {
261 b'fields': {
259 b'default': set([]),
262 b'default': set([]),
260 b'required': False,
263 b'required': False,
261 b'type': b'set',
264 b'type': b'set',
262 b'validvalues': set([
265 b'validvalues': set([
263 b'parents',
266 b'parents',
264 b'revision'
267 b'revision'
265 ])
268 ])
266 },
269 },
267 b'haveparents': {
270 b'haveparents': {
268 b'default': False,
271 b'default': False,
269 b'required': False,
272 b'required': False,
270 b'type': b'bool'
273 b'type': b'bool'
271 },
274 },
272 b'nodes': {
275 b'nodes': {
273 b'required': True,
276 b'required': True,
274 b'type': b'list'
277 b'type': b'list'
275 },
278 },
276 b'tree': {
279 b'tree': {
277 b'required': True,
280 b'required': True,
278 b'type': b'bytes'
281 b'type': b'bytes'
279 }
282 }
280 },
283 },
281 b'permissions': [
284 b'permissions': [
282 b'pull'
285 b'pull'
283 ],
286 ],
284 b'recommendedbatchsize': 100000
287 b'recommendedbatchsize': 100000
285 },
288 },
286 b'pushkey': {
289 b'pushkey': {
287 b'args': {
290 b'args': {
288 b'key': {
291 b'key': {
289 b'required': True,
292 b'required': True,
290 b'type': b'bytes'
293 b'type': b'bytes'
291 },
294 },
292 b'namespace': {
295 b'namespace': {
293 b'required': True,
296 b'required': True,
294 b'type': b'bytes'
297 b'type': b'bytes'
295 },
298 },
296 b'new': {
299 b'new': {
297 b'required': True,
300 b'required': True,
298 b'type': b'bytes'
301 b'type': b'bytes'
299 },
302 },
300 b'old': {
303 b'old': {
301 b'required': True,
304 b'required': True,
302 b'type': b'bytes'
305 b'type': b'bytes'
303 }
306 }
304 },
307 },
305 b'permissions': [
308 b'permissions': [
306 b'push'
309 b'push'
307 ]
310 ]
308 },
311 },
309 b'rawstorefiledata': {
312 b'rawstorefiledata': {
310 b'args': {
313 b'args': {
311 b'files': {
314 b'files': {
312 b'required': True,
315 b'required': True,
313 b'type': b'list'
316 b'type': b'list'
314 },
317 },
315 b'pathfilter': {
318 b'pathfilter': {
316 b'default': None,
319 b'default': None,
317 b'required': False,
320 b'required': False,
318 b'type': b'list'
321 b'type': b'list'
319 }
322 }
320 },
323 },
321 b'permissions': [
324 b'permissions': [
322 b'pull'
325 b'pull'
323 ]
326 ]
324 }
327 }
325 },
328 },
326 b'framingmediatypes': [
329 b'framingmediatypes': [
327 b'application/mercurial-exp-framing-0006'
330 b'application/mercurial-exp-framing-0006'
328 ],
331 ],
329 b'pathfilterprefixes': set([
332 b'pathfilterprefixes': set([
330 b'path:',
333 b'path:',
331 b'rootfilesin:'
334 b'rootfilesin:'
332 ]),
335 ]),
333 b'rawrepoformats': [
336 b'rawrepoformats': [
334 b'generaldelta',
337 b'generaldelta',
335 b'revlogv1'
338 b'revlogv1'
336 ],
339 ],
337 b'redirect': {
340 b'redirect': {
338 b'hashes': [
341 b'hashes': [
339 b'sha256',
342 b'sha256',
340 b'sha1'
343 b'sha1'
341 ],
344 ],
342 b'targets': [
345 b'targets': [
343 {
346 {
344 b'name': b'target-a',
347 b'name': b'target-a',
345 b'protocol': b'http',
348 b'protocol': b'http',
346 b'snirequired': False,
349 b'snirequired': False,
347 b'tlsversions': [
350 b'tlsversions': [
348 b'1.2',
351 b'1.2',
349 b'1.3'
352 b'1.3'
350 ],
353 ],
351 b'uris': [
354 b'uris': [
352 b'http://example.com/'
355 b'http://example.com/'
353 ]
356 ]
354 }
357 }
355 ]
358 ]
356 }
359 }
357 }
360 }
358 ]
361 ]
359 (sent 2 HTTP requests and * bytes; received * bytes in responses) (glob)
362 (sent 2 HTTP requests and * bytes; received * bytes in responses) (glob)
360
363
361 Unknown protocol is filtered from compatible targets
364 Unknown protocol is filtered from compatible targets
362
365
363 $ cat > redirects.py << EOF
366 $ cat > redirects.py << EOF
364 > [
367 > [
365 > {
368 > {
366 > b'name': b'target-a',
369 > b'name': b'target-a',
367 > b'protocol': b'http',
370 > b'protocol': b'http',
368 > b'uris': [b'http://example.com/'],
371 > b'uris': [b'http://example.com/'],
369 > },
372 > },
370 > {
373 > {
371 > b'name': b'target-b',
374 > b'name': b'target-b',
372 > b'protocol': b'unknown',
375 > b'protocol': b'unknown',
373 > b'uris': [b'unknown://example.com/'],
376 > b'uris': [b'unknown://example.com/'],
374 > },
377 > },
375 > ]
378 > ]
376 > EOF
379 > EOF
377
380
378 $ sendhttpv2peerhandshake << EOF
381 $ sendhttpv2peerhandshake << EOF
379 > command capabilities
382 > command capabilities
380 > EOF
383 > EOF
381 creating http peer for wire protocol version 2
384 creating http peer for wire protocol version 2
382 s> GET /?cmd=capabilities HTTP/1.1\r\n
385 s> GET /?cmd=capabilities HTTP/1.1\r\n
383 s> Accept-Encoding: identity\r\n
386 s> Accept-Encoding: identity\r\n
384 s> vary: X-HgProto-1,X-HgUpgrade-1\r\n
387 s> vary: X-HgProto-1,X-HgUpgrade-1\r\n
385 s> x-hgproto-1: cbor\r\n
388 s> x-hgproto-1: cbor\r\n
386 s> x-hgupgrade-1: exp-http-v2-0003\r\n
389 s> x-hgupgrade-1: exp-http-v2-0003\r\n
387 s> accept: application/mercurial-0.1\r\n
390 s> accept: application/mercurial-0.1\r\n
388 s> host: $LOCALIP:$HGPORT\r\n (glob)
391 s> host: $LOCALIP:$HGPORT\r\n (glob)
389 s> user-agent: Mercurial debugwireproto\r\n
392 s> user-agent: Mercurial debugwireproto\r\n
390 s> \r\n
393 s> \r\n
391 s> makefile('rb', None)
394 s> makefile('rb', None)
392 s> HTTP/1.1 200 OK\r\n
395 s> HTTP/1.1 200 OK\r\n
393 s> Server: testing stub value\r\n
396 s> Server: testing stub value\r\n
394 s> Date: $HTTP_DATE$\r\n
397 s> Date: $HTTP_DATE$\r\n
395 s> Content-Type: application/mercurial-cbor\r\n
398 s> Content-Type: application/mercurial-cbor\r\n
396 s> Content-Length: 2286\r\n
399 s> Content-Length: 2286\r\n
397 s> \r\n
400 s> \r\n
398 s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd
9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/Nv1capabilitiesY\x01\xd3batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
401 s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd
9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/Nv1capabilitiesY\x01\xd3batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
399 (remote redirect target target-a is compatible)
402 (remote redirect target target-a is compatible)
400 (remote redirect target target-b uses unsupported protocol: unknown)
403 (remote redirect target target-b uses unsupported protocol: unknown)
401 sending capabilities command
404 sending capabilities command
402 s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
405 s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
403 s> Accept-Encoding: identity\r\n
406 s> Accept-Encoding: identity\r\n
404 s> accept: application/mercurial-exp-framing-0006\r\n
407 s> accept: application/mercurial-exp-framing-0006\r\n
405 s> content-type: application/mercurial-exp-framing-0006\r\n
408 s> content-type: application/mercurial-exp-framing-0006\r\n
406 s> content-length: 111\r\n
409 s> content-length: 111\r\n
407 s> host: $LOCALIP:$HGPORT\r\n (glob)
410 s> host: $LOCALIP:$HGPORT\r\n (glob)
408 s> user-agent: Mercurial debugwireproto\r\n
411 s> user-agent: Mercurial debugwireproto\r\n
409 s> \r\n
412 s> \r\n
410 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81HidentityC\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81Htarget-a
413 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81HidentityC\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81Htarget-a
411 s> makefile('rb', None)
414 s> makefile('rb', None)
412 s> HTTP/1.1 200 OK\r\n
415 s> HTTP/1.1 200 OK\r\n
413 s> Server: testing stub value\r\n
416 s> Server: testing stub value\r\n
414 s> Date: $HTTP_DATE$\r\n
417 s> Date: $HTTP_DATE$\r\n
415 s> Content-Type: application/mercurial-exp-framing-0006\r\n
418 s> Content-Type: application/mercurial-exp-framing-0006\r\n
416 s> Transfer-Encoding: chunked\r\n
419 s> Transfer-Encoding: chunked\r\n
417 s> \r\n
420 s> \r\n
418 s> 11\r\n
421 s> 11\r\n
419 s> \t\x00\x00\x01\x00\x02\x01\x92
422 s> \t\x00\x00\x01\x00\x02\x01\x92
420 s> Hidentity
423 s> Hidentity
421 s> \r\n
424 s> \r\n
422 received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
425 received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
423 s> 13\r\n
426 s> 13\r\n
424 s> \x0b\x00\x00\x01\x00\x02\x041
427 s> \x0b\x00\x00\x01\x00\x02\x041
425 s> \xa1FstatusBok
428 s> \xa1FstatusBok
426 s> \r\n
429 s> \r\n
427 received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
430 received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
428 s> 6ec\r\n
431 s> 6ec\r\n
429 s> \xe4\x06\x00\x01\x00\x02\x041
432 s> \xe4\x06\x00\x01\x00\x02\x041
430 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepofor
mats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/
433 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepofor
mats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/
431 s> \r\n
434 s> \r\n
432 received frame(size=1764; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
435 received frame(size=1764; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
433 s> 8\r\n
436 s> 8\r\n
434 s> \x00\x00\x00\x01\x00\x02\x002
437 s> \x00\x00\x00\x01\x00\x02\x002
435 s> \r\n
438 s> \r\n
436 s> 0\r\n
439 s> 0\r\n
437 s> \r\n
440 s> \r\n
438 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
441 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
439 response: gen[
442 response: gen[
440 {
443 {
441 b'commands': {
444 b'commands': {
442 b'branchmap': {
445 b'branchmap': {
443 b'args': {},
446 b'args': {},
444 b'permissions': [
447 b'permissions': [
445 b'pull'
448 b'pull'
446 ]
449 ]
447 },
450 },
448 b'capabilities': {
451 b'capabilities': {
449 b'args': {},
452 b'args': {},
450 b'permissions': [
453 b'permissions': [
451 b'pull'
454 b'pull'
452 ]
455 ]
453 },
456 },
454 b'changesetdata': {
457 b'changesetdata': {
455 b'args': {
458 b'args': {
456 b'fields': {
459 b'fields': {
457 b'default': set([]),
460 b'default': set([]),
458 b'required': False,
461 b'required': False,
459 b'type': b'set',
462 b'type': b'set',
460 b'validvalues': set([
463 b'validvalues': set([
461 b'bookmarks',
464 b'bookmarks',
462 b'parents',
465 b'parents',
463 b'phase',
466 b'phase',
464 b'revision'
467 b'revision'
465 ])
468 ])
466 },
469 },
467 b'revisions': {
470 b'revisions': {
468 b'required': True,
471 b'required': True,
469 b'type': b'list'
472 b'type': b'list'
470 }
473 }
471 },
474 },
472 b'permissions': [
475 b'permissions': [
473 b'pull'
476 b'pull'
474 ]
477 ]
475 },
478 },
476 b'filedata': {
479 b'filedata': {
477 b'args': {
480 b'args': {
478 b'fields': {
481 b'fields': {
479 b'default': set([]),
482 b'default': set([]),
480 b'required': False,
483 b'required': False,
481 b'type': b'set',
484 b'type': b'set',
482 b'validvalues': set([
485 b'validvalues': set([
483 b'linknode',
486 b'linknode',
484 b'parents',
487 b'parents',
485 b'revision'
488 b'revision'
486 ])
489 ])
487 },
490 },
488 b'haveparents': {
491 b'haveparents': {
489 b'default': False,
492 b'default': False,
490 b'required': False,
493 b'required': False,
491 b'type': b'bool'
494 b'type': b'bool'
492 },
495 },
493 b'nodes': {
496 b'nodes': {
494 b'required': True,
497 b'required': True,
495 b'type': b'list'
498 b'type': b'list'
496 },
499 },
497 b'path': {
500 b'path': {
498 b'required': True,
501 b'required': True,
499 b'type': b'bytes'
502 b'type': b'bytes'
500 }
503 }
501 },
504 },
502 b'permissions': [
505 b'permissions': [
503 b'pull'
506 b'pull'
504 ]
507 ]
505 },
508 },
506 b'filesdata': {
509 b'filesdata': {
507 b'args': {
510 b'args': {
508 b'fields': {
511 b'fields': {
509 b'default': set([]),
512 b'default': set([]),
510 b'required': False,
513 b'required': False,
511 b'type': b'set',
514 b'type': b'set',
512 b'validvalues': set([
515 b'validvalues': set([
513 b'firstchangeset',
516 b'firstchangeset',
514 b'linknode',
517 b'linknode',
515 b'parents',
518 b'parents',
516 b'revision'
519 b'revision'
517 ])
520 ])
518 },
521 },
519 b'haveparents': {
522 b'haveparents': {
520 b'default': False,
523 b'default': False,
521 b'required': False,
524 b'required': False,
522 b'type': b'bool'
525 b'type': b'bool'
523 },
526 },
524 b'pathfilter': {
527 b'pathfilter': {
525 b'default': None,
528 b'default': None,
526 b'required': False,
529 b'required': False,
527 b'type': b'dict'
530 b'type': b'dict'
528 },
531 },
529 b'revisions': {
532 b'revisions': {
530 b'required': True,
533 b'required': True,
531 b'type': b'list'
534 b'type': b'list'
532 }
535 }
533 },
536 },
534 b'permissions': [
537 b'permissions': [
535 b'pull'
538 b'pull'
536 ],
539 ],
537 b'recommendedbatchsize': 50000
540 b'recommendedbatchsize': 50000
538 },
541 },
539 b'heads': {
542 b'heads': {
540 b'args': {
543 b'args': {
541 b'publiconly': {
544 b'publiconly': {
542 b'default': False,
545 b'default': False,
543 b'required': False,
546 b'required': False,
544 b'type': b'bool'
547 b'type': b'bool'
545 }
548 }
546 },
549 },
547 b'permissions': [
550 b'permissions': [
548 b'pull'
551 b'pull'
549 ]
552 ]
550 },
553 },
551 b'known': {
554 b'known': {
552 b'args': {
555 b'args': {
553 b'nodes': {
556 b'nodes': {
554 b'default': [],
557 b'default': [],
555 b'required': False,
558 b'required': False,
556 b'type': b'list'
559 b'type': b'list'
557 }
560 }
558 },
561 },
559 b'permissions': [
562 b'permissions': [
560 b'pull'
563 b'pull'
561 ]
564 ]
562 },
565 },
563 b'listkeys': {
566 b'listkeys': {
564 b'args': {
567 b'args': {
565 b'namespace': {
568 b'namespace': {
566 b'required': True,
569 b'required': True,
567 b'type': b'bytes'
570 b'type': b'bytes'
568 }
571 }
569 },
572 },
570 b'permissions': [
573 b'permissions': [
571 b'pull'
574 b'pull'
572 ]
575 ]
573 },
576 },
574 b'lookup': {
577 b'lookup': {
575 b'args': {
578 b'args': {
576 b'key': {
579 b'key': {
577 b'required': True,
580 b'required': True,
578 b'type': b'bytes'
581 b'type': b'bytes'
579 }
582 }
580 },
583 },
581 b'permissions': [
584 b'permissions': [
582 b'pull'
585 b'pull'
583 ]
586 ]
584 },
587 },
585 b'manifestdata': {
588 b'manifestdata': {
586 b'args': {
589 b'args': {
587 b'fields': {
590 b'fields': {
588 b'default': set([]),
591 b'default': set([]),
589 b'required': False,
592 b'required': False,
590 b'type': b'set',
593 b'type': b'set',
591 b'validvalues': set([
594 b'validvalues': set([
592 b'parents',
595 b'parents',
593 b'revision'
596 b'revision'
594 ])
597 ])
595 },
598 },
596 b'haveparents': {
599 b'haveparents': {
597 b'default': False,
600 b'default': False,
598 b'required': False,
601 b'required': False,
599 b'type': b'bool'
602 b'type': b'bool'
600 },
603 },
601 b'nodes': {
604 b'nodes': {
602 b'required': True,
605 b'required': True,
603 b'type': b'list'
606 b'type': b'list'
604 },
607 },
605 b'tree': {
608 b'tree': {
606 b'required': True,
609 b'required': True,
607 b'type': b'bytes'
610 b'type': b'bytes'
608 }
611 }
609 },
612 },
610 b'permissions': [
613 b'permissions': [
611 b'pull'
614 b'pull'
612 ],
615 ],
613 b'recommendedbatchsize': 100000
616 b'recommendedbatchsize': 100000
614 },
617 },
615 b'pushkey': {
618 b'pushkey': {
616 b'args': {
619 b'args': {
617 b'key': {
620 b'key': {
618 b'required': True,
621 b'required': True,
619 b'type': b'bytes'
622 b'type': b'bytes'
620 },
623 },
621 b'namespace': {
624 b'namespace': {
622 b'required': True,
625 b'required': True,
623 b'type': b'bytes'
626 b'type': b'bytes'
624 },
627 },
625 b'new': {
628 b'new': {
626 b'required': True,
629 b'required': True,
627 b'type': b'bytes'
630 b'type': b'bytes'
628 },
631 },
629 b'old': {
632 b'old': {
630 b'required': True,
633 b'required': True,
631 b'type': b'bytes'
634 b'type': b'bytes'
632 }
635 }
633 },
636 },
634 b'permissions': [
637 b'permissions': [
635 b'push'
638 b'push'
636 ]
639 ]
637 },
640 },
638 b'rawstorefiledata': {
641 b'rawstorefiledata': {
639 b'args': {
642 b'args': {
640 b'files': {
643 b'files': {
641 b'required': True,
644 b'required': True,
642 b'type': b'list'
645 b'type': b'list'
643 },
646 },
644 b'pathfilter': {
647 b'pathfilter': {
645 b'default': None,
648 b'default': None,
646 b'required': False,
649 b'required': False,
647 b'type': b'list'
650 b'type': b'list'
648 }
651 }
649 },
652 },
650 b'permissions': [
653 b'permissions': [
651 b'pull'
654 b'pull'
652 ]
655 ]
653 }
656 }
654 },
657 },
655 b'framingmediatypes': [
658 b'framingmediatypes': [
656 b'application/mercurial-exp-framing-0006'
659 b'application/mercurial-exp-framing-0006'
657 ],
660 ],
658 b'pathfilterprefixes': set([
661 b'pathfilterprefixes': set([
659 b'path:',
662 b'path:',
660 b'rootfilesin:'
663 b'rootfilesin:'
661 ]),
664 ]),
662 b'rawrepoformats': [
665 b'rawrepoformats': [
663 b'generaldelta',
666 b'generaldelta',
664 b'revlogv1'
667 b'revlogv1'
665 ],
668 ],
666 b'redirect': {
669 b'redirect': {
667 b'hashes': [
670 b'hashes': [
668 b'sha256',
671 b'sha256',
669 b'sha1'
672 b'sha1'
670 ],
673 ],
671 b'targets': [
674 b'targets': [
672 {
675 {
673 b'name': b'target-a',
676 b'name': b'target-a',
674 b'protocol': b'http',
677 b'protocol': b'http',
675 b'uris': [
678 b'uris': [
676 b'http://example.com/'
679 b'http://example.com/'
677 ]
680 ]
678 },
681 },
679 {
682 {
680 b'name': b'target-b',
683 b'name': b'target-b',
681 b'protocol': b'unknown',
684 b'protocol': b'unknown',
682 b'uris': [
685 b'uris': [
683 b'unknown://example.com/'
686 b'unknown://example.com/'
684 ]
687 ]
685 }
688 }
686 ]
689 ]
687 }
690 }
688 }
691 }
689 ]
692 ]
690 (sent 2 HTTP requests and * bytes; received * bytes in responses) (glob)
693 (sent 2 HTTP requests and * bytes; received * bytes in responses) (glob)
691
694
692 Missing SNI support filters targets that require SNI
695 Missing SNI support filters targets that require SNI
693
696
694 $ cat > nosni.py << EOF
697 $ cat > nosni.py << EOF
695 > from mercurial import sslutil
698 > from mercurial import sslutil
696 > sslutil.hassni = False
699 > sslutil.hassni = False
697 > EOF
700 > EOF
698 $ cat >> $HGRCPATH << EOF
701 $ cat >> $HGRCPATH << EOF
699 > [extensions]
702 > [extensions]
700 > nosni=`pwd`/nosni.py
703 > nosni=`pwd`/nosni.py
701 > EOF
704 > EOF
702
705
703 $ cat > redirects.py << EOF
706 $ cat > redirects.py << EOF
704 > [
707 > [
705 > {
708 > {
706 > b'name': b'target-bad-tls',
709 > b'name': b'target-bad-tls',
707 > b'protocol': b'https',
710 > b'protocol': b'https',
708 > b'uris': [b'https://example.com/'],
711 > b'uris': [b'https://example.com/'],
709 > b'snirequired': True,
712 > b'snirequired': True,
710 > },
713 > },
711 > ]
714 > ]
712 > EOF
715 > EOF
713
716
714 $ sendhttpv2peerhandshake << EOF
717 $ sendhttpv2peerhandshake << EOF
715 > command capabilities
718 > command capabilities
716 > EOF
719 > EOF
717 creating http peer for wire protocol version 2
720 creating http peer for wire protocol version 2
718 s> GET /?cmd=capabilities HTTP/1.1\r\n
721 s> GET /?cmd=capabilities HTTP/1.1\r\n
719 s> Accept-Encoding: identity\r\n
722 s> Accept-Encoding: identity\r\n
720 s> vary: X-HgProto-1,X-HgUpgrade-1\r\n
723 s> vary: X-HgProto-1,X-HgUpgrade-1\r\n
721 s> x-hgproto-1: cbor\r\n
724 s> x-hgproto-1: cbor\r\n
722 s> x-hgupgrade-1: exp-http-v2-0003\r\n
725 s> x-hgupgrade-1: exp-http-v2-0003\r\n
723 s> accept: application/mercurial-0.1\r\n
726 s> accept: application/mercurial-0.1\r\n
724 s> host: $LOCALIP:$HGPORT\r\n (glob)
727 s> host: $LOCALIP:$HGPORT\r\n (glob)
725 s> user-agent: Mercurial debugwireproto\r\n
728 s> user-agent: Mercurial debugwireproto\r\n
726 s> \r\n
729 s> \r\n
727 s> makefile('rb', None)
730 s> makefile('rb', None)
728 s> HTTP/1.1 200 OK\r\n
731 s> HTTP/1.1 200 OK\r\n
729 s> Server: testing stub value\r\n
732 s> Server: testing stub value\r\n
730 s> Date: $HTTP_DATE$\r\n
733 s> Date: $HTTP_DATE$\r\n
731 s> Content-Type: application/mercurial-cbor\r\n
734 s> Content-Type: application/mercurial-cbor\r\n
732 s> Content-Length: 2246\r\n
735 s> Content-Length: 2246\r\n
733 s> \r\n
736 s> \r\n
734 s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd
9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xd3batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
737 s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd
9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xd3batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
735 (redirect target target-bad-tls requires SNI, which is unsupported)
738 (redirect target target-bad-tls requires SNI, which is unsupported)
736 sending capabilities command
739 sending capabilities command
737 s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
740 s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
738 s> Accept-Encoding: identity\r\n
741 s> Accept-Encoding: identity\r\n
739 s> accept: application/mercurial-exp-framing-0006\r\n
742 s> accept: application/mercurial-exp-framing-0006\r\n
740 s> content-type: application/mercurial-exp-framing-0006\r\n
743 s> content-type: application/mercurial-exp-framing-0006\r\n
741 s> content-length: 102\r\n
744 s> content-length: 102\r\n
742 s> host: $LOCALIP:$HGPORT\r\n (glob)
745 s> host: $LOCALIP:$HGPORT\r\n (glob)
743 s> user-agent: Mercurial debugwireproto\r\n
746 s> user-agent: Mercurial debugwireproto\r\n
744 s> \r\n
747 s> \r\n
745 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81Hidentity:\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x80
748 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81Hidentity:\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x80
746 s> makefile('rb', None)
749 s> makefile('rb', None)
747 s> HTTP/1.1 200 OK\r\n
750 s> HTTP/1.1 200 OK\r\n
748 s> Server: testing stub value\r\n
751 s> Server: testing stub value\r\n
749 s> Date: $HTTP_DATE$\r\n
752 s> Date: $HTTP_DATE$\r\n
750 s> Content-Type: application/mercurial-exp-framing-0006\r\n
753 s> Content-Type: application/mercurial-exp-framing-0006\r\n
751 s> Transfer-Encoding: chunked\r\n
754 s> Transfer-Encoding: chunked\r\n
752 s> \r\n
755 s> \r\n
753 s> 11\r\n
756 s> 11\r\n
754 s> \t\x00\x00\x01\x00\x02\x01\x92
757 s> \t\x00\x00\x01\x00\x02\x01\x92
755 s> Hidentity
758 s> Hidentity
756 s> \r\n
759 s> \r\n
757 received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
760 received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
758 s> 13\r\n
761 s> 13\r\n
759 s> \x0b\x00\x00\x01\x00\x02\x041
762 s> \x0b\x00\x00\x01\x00\x02\x041
760 s> \xa1FstatusBok
763 s> \xa1FstatusBok
761 s> \r\n
764 s> \r\n
762 received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
765 received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
763 s> 6c4\r\n
766 s> 6c4\r\n
764 s> \xbc\x06\x00\x01\x00\x02\x041
767 s> \xbc\x06\x00\x01\x00\x02\x041
765 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepofor
mats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/
768 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepofor
mats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/
766 s> \r\n
769 s> \r\n
767 received frame(size=1724; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
770 received frame(size=1724; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
768 s> 8\r\n
771 s> 8\r\n
769 s> \x00\x00\x00\x01\x00\x02\x002
772 s> \x00\x00\x00\x01\x00\x02\x002
770 s> \r\n
773 s> \r\n
771 s> 0\r\n
774 s> 0\r\n
772 s> \r\n
775 s> \r\n
773 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
776 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
774 response: gen[
777 response: gen[
775 {
778 {
776 b'commands': {
779 b'commands': {
777 b'branchmap': {
780 b'branchmap': {
778 b'args': {},
781 b'args': {},
779 b'permissions': [
782 b'permissions': [
780 b'pull'
783 b'pull'
781 ]
784 ]
782 },
785 },
783 b'capabilities': {
786 b'capabilities': {
784 b'args': {},
787 b'args': {},
785 b'permissions': [
788 b'permissions': [
786 b'pull'
789 b'pull'
787 ]
790 ]
788 },
791 },
789 b'changesetdata': {
792 b'changesetdata': {
790 b'args': {
793 b'args': {
791 b'fields': {
794 b'fields': {
792 b'default': set([]),
795 b'default': set([]),
793 b'required': False,
796 b'required': False,
794 b'type': b'set',
797 b'type': b'set',
795 b'validvalues': set([
798 b'validvalues': set([
796 b'bookmarks',
799 b'bookmarks',
797 b'parents',
800 b'parents',
798 b'phase',
801 b'phase',
799 b'revision'
802 b'revision'
800 ])
803 ])
801 },
804 },
802 b'revisions': {
805 b'revisions': {
803 b'required': True,
806 b'required': True,
804 b'type': b'list'
807 b'type': b'list'
805 }
808 }
806 },
809 },
807 b'permissions': [
810 b'permissions': [
808 b'pull'
811 b'pull'
809 ]
812 ]
810 },
813 },
811 b'filedata': {
814 b'filedata': {
812 b'args': {
815 b'args': {
813 b'fields': {
816 b'fields': {
814 b'default': set([]),
817 b'default': set([]),
815 b'required': False,
818 b'required': False,
816 b'type': b'set',
819 b'type': b'set',
817 b'validvalues': set([
820 b'validvalues': set([
818 b'linknode',
821 b'linknode',
819 b'parents',
822 b'parents',
820 b'revision'
823 b'revision'
821 ])
824 ])
822 },
825 },
823 b'haveparents': {
826 b'haveparents': {
824 b'default': False,
827 b'default': False,
825 b'required': False,
828 b'required': False,
826 b'type': b'bool'
829 b'type': b'bool'
827 },
830 },
828 b'nodes': {
831 b'nodes': {
829 b'required': True,
832 b'required': True,
830 b'type': b'list'
833 b'type': b'list'
831 },
834 },
832 b'path': {
835 b'path': {
833 b'required': True,
836 b'required': True,
834 b'type': b'bytes'
837 b'type': b'bytes'
835 }
838 }
836 },
839 },
837 b'permissions': [
840 b'permissions': [
838 b'pull'
841 b'pull'
839 ]
842 ]
840 },
843 },
841 b'filesdata': {
844 b'filesdata': {
842 b'args': {
845 b'args': {
843 b'fields': {
846 b'fields': {
844 b'default': set([]),
847 b'default': set([]),
845 b'required': False,
848 b'required': False,
846 b'type': b'set',
849 b'type': b'set',
847 b'validvalues': set([
850 b'validvalues': set([
848 b'firstchangeset',
851 b'firstchangeset',
849 b'linknode',
852 b'linknode',
850 b'parents',
853 b'parents',
851 b'revision'
854 b'revision'
852 ])
855 ])
853 },
856 },
854 b'haveparents': {
857 b'haveparents': {
855 b'default': False,
858 b'default': False,
856 b'required': False,
859 b'required': False,
857 b'type': b'bool'
860 b'type': b'bool'
858 },
861 },
859 b'pathfilter': {
862 b'pathfilter': {
860 b'default': None,
863 b'default': None,
861 b'required': False,
864 b'required': False,
862 b'type': b'dict'
865 b'type': b'dict'
863 },
866 },
864 b'revisions': {
867 b'revisions': {
865 b'required': True,
868 b'required': True,
866 b'type': b'list'
869 b'type': b'list'
867 }
870 }
868 },
871 },
869 b'permissions': [
872 b'permissions': [
870 b'pull'
873 b'pull'
871 ],
874 ],
872 b'recommendedbatchsize': 50000
875 b'recommendedbatchsize': 50000
873 },
876 },
874 b'heads': {
877 b'heads': {
875 b'args': {
878 b'args': {
876 b'publiconly': {
879 b'publiconly': {
877 b'default': False,
880 b'default': False,
878 b'required': False,
881 b'required': False,
879 b'type': b'bool'
882 b'type': b'bool'
880 }
883 }
881 },
884 },
882 b'permissions': [
885 b'permissions': [
883 b'pull'
886 b'pull'
884 ]
887 ]
885 },
888 },
886 b'known': {
889 b'known': {
887 b'args': {
890 b'args': {
888 b'nodes': {
891 b'nodes': {
889 b'default': [],
892 b'default': [],
890 b'required': False,
893 b'required': False,
891 b'type': b'list'
894 b'type': b'list'
892 }
895 }
893 },
896 },
894 b'permissions': [
897 b'permissions': [
895 b'pull'
898 b'pull'
896 ]
899 ]
897 },
900 },
898 b'listkeys': {
901 b'listkeys': {
899 b'args': {
902 b'args': {
900 b'namespace': {
903 b'namespace': {
901 b'required': True,
904 b'required': True,
902 b'type': b'bytes'
905 b'type': b'bytes'
903 }
906 }
904 },
907 },
905 b'permissions': [
908 b'permissions': [
906 b'pull'
909 b'pull'
907 ]
910 ]
908 },
911 },
909 b'lookup': {
912 b'lookup': {
910 b'args': {
913 b'args': {
911 b'key': {
914 b'key': {
912 b'required': True,
915 b'required': True,
913 b'type': b'bytes'
916 b'type': b'bytes'
914 }
917 }
915 },
918 },
916 b'permissions': [
919 b'permissions': [
917 b'pull'
920 b'pull'
918 ]
921 ]
919 },
922 },
920 b'manifestdata': {
923 b'manifestdata': {
921 b'args': {
924 b'args': {
922 b'fields': {
925 b'fields': {
923 b'default': set([]),
926 b'default': set([]),
924 b'required': False,
927 b'required': False,
925 b'type': b'set',
928 b'type': b'set',
926 b'validvalues': set([
929 b'validvalues': set([
927 b'parents',
930 b'parents',
928 b'revision'
931 b'revision'
929 ])
932 ])
930 },
933 },
931 b'haveparents': {
934 b'haveparents': {
932 b'default': False,
935 b'default': False,
933 b'required': False,
936 b'required': False,
934 b'type': b'bool'
937 b'type': b'bool'
935 },
938 },
936 b'nodes': {
939 b'nodes': {
937 b'required': True,
940 b'required': True,
938 b'type': b'list'
941 b'type': b'list'
939 },
942 },
940 b'tree': {
943 b'tree': {
941 b'required': True,
944 b'required': True,
942 b'type': b'bytes'
945 b'type': b'bytes'
943 }
946 }
944 },
947 },
945 b'permissions': [
948 b'permissions': [
946 b'pull'
949 b'pull'
947 ],
950 ],
948 b'recommendedbatchsize': 100000
951 b'recommendedbatchsize': 100000
949 },
952 },
950 b'pushkey': {
953 b'pushkey': {
951 b'args': {
954 b'args': {
952 b'key': {
955 b'key': {
953 b'required': True,
956 b'required': True,
954 b'type': b'bytes'
957 b'type': b'bytes'
955 },
958 },
956 b'namespace': {
959 b'namespace': {
957 b'required': True,
960 b'required': True,
958 b'type': b'bytes'
961 b'type': b'bytes'
959 },
962 },
960 b'new': {
963 b'new': {
961 b'required': True,
964 b'required': True,
962 b'type': b'bytes'
965 b'type': b'bytes'
963 },
966 },
964 b'old': {
967 b'old': {
965 b'required': True,
968 b'required': True,
966 b'type': b'bytes'
969 b'type': b'bytes'
967 }
970 }
968 },
971 },
969 b'permissions': [
972 b'permissions': [
970 b'push'
973 b'push'
971 ]
974 ]
972 },
975 },
973 b'rawstorefiledata': {
976 b'rawstorefiledata': {
974 b'args': {
977 b'args': {
975 b'files': {
978 b'files': {
976 b'required': True,
979 b'required': True,
977 b'type': b'list'
980 b'type': b'list'
978 },
981 },
979 b'pathfilter': {
982 b'pathfilter': {
980 b'default': None,
983 b'default': None,
981 b'required': False,
984 b'required': False,
982 b'type': b'list'
985 b'type': b'list'
983 }
986 }
984 },
987 },
985 b'permissions': [
988 b'permissions': [
986 b'pull'
989 b'pull'
987 ]
990 ]
988 }
991 }
989 },
992 },
990 b'framingmediatypes': [
993 b'framingmediatypes': [
991 b'application/mercurial-exp-framing-0006'
994 b'application/mercurial-exp-framing-0006'
992 ],
995 ],
993 b'pathfilterprefixes': set([
996 b'pathfilterprefixes': set([
994 b'path:',
997 b'path:',
995 b'rootfilesin:'
998 b'rootfilesin:'
996 ]),
999 ]),
997 b'rawrepoformats': [
1000 b'rawrepoformats': [
998 b'generaldelta',
1001 b'generaldelta',
999 b'revlogv1'
1002 b'revlogv1'
1000 ],
1003 ],
1001 b'redirect': {
1004 b'redirect': {
1002 b'hashes': [
1005 b'hashes': [
1003 b'sha256',
1006 b'sha256',
1004 b'sha1'
1007 b'sha1'
1005 ],
1008 ],
1006 b'targets': [
1009 b'targets': [
1007 {
1010 {
1008 b'name': b'target-bad-tls',
1011 b'name': b'target-bad-tls',
1009 b'protocol': b'https',
1012 b'protocol': b'https',
1010 b'snirequired': True,
1013 b'snirequired': True,
1011 b'uris': [
1014 b'uris': [
1012 b'https://example.com/'
1015 b'https://example.com/'
1013 ]
1016 ]
1014 }
1017 }
1015 ]
1018 ]
1016 }
1019 }
1017 }
1020 }
1018 ]
1021 ]
1019 (sent 2 HTTP requests and * bytes; received * bytes in responses) (glob)
1022 (sent 2 HTTP requests and * bytes; received * bytes in responses) (glob)
1020
1023
1021 $ cat >> $HGRCPATH << EOF
1024 $ cat >> $HGRCPATH << EOF
1022 > [extensions]
1025 > [extensions]
1023 > nosni=!
1026 > nosni=!
1024 > EOF
1027 > EOF
1025
1028
1026 Unknown tls value is filtered from compatible targets
1029 Unknown tls value is filtered from compatible targets
1027
1030
1028 $ cat > redirects.py << EOF
1031 $ cat > redirects.py << EOF
1029 > [
1032 > [
1030 > {
1033 > {
1031 > b'name': b'target-bad-tls',
1034 > b'name': b'target-bad-tls',
1032 > b'protocol': b'https',
1035 > b'protocol': b'https',
1033 > b'uris': [b'https://example.com/'],
1036 > b'uris': [b'https://example.com/'],
1034 > b'tlsversions': [b'42', b'39'],
1037 > b'tlsversions': [b'42', b'39'],
1035 > },
1038 > },
1036 > ]
1039 > ]
1037 > EOF
1040 > EOF
1038
1041
1039 $ sendhttpv2peerhandshake << EOF
1042 $ sendhttpv2peerhandshake << EOF
1040 > command capabilities
1043 > command capabilities
1041 > EOF
1044 > EOF
1042 creating http peer for wire protocol version 2
1045 creating http peer for wire protocol version 2
1043 s> GET /?cmd=capabilities HTTP/1.1\r\n
1046 s> GET /?cmd=capabilities HTTP/1.1\r\n
1044 s> Accept-Encoding: identity\r\n
1047 s> Accept-Encoding: identity\r\n
1045 s> vary: X-HgProto-1,X-HgUpgrade-1\r\n
1048 s> vary: X-HgProto-1,X-HgUpgrade-1\r\n
1046 s> x-hgproto-1: cbor\r\n
1049 s> x-hgproto-1: cbor\r\n
1047 s> x-hgupgrade-1: exp-http-v2-0003\r\n
1050 s> x-hgupgrade-1: exp-http-v2-0003\r\n
1048 s> accept: application/mercurial-0.1\r\n
1051 s> accept: application/mercurial-0.1\r\n
1049 s> host: $LOCALIP:$HGPORT\r\n (glob)
1052 s> host: $LOCALIP:$HGPORT\r\n (glob)
1050 s> user-agent: Mercurial debugwireproto\r\n
1053 s> user-agent: Mercurial debugwireproto\r\n
1051 s> \r\n
1054 s> \r\n
1052 s> makefile('rb', None)
1055 s> makefile('rb', None)
1053 s> HTTP/1.1 200 OK\r\n
1056 s> HTTP/1.1 200 OK\r\n
1054 s> Server: testing stub value\r\n
1057 s> Server: testing stub value\r\n
1055 s> Date: $HTTP_DATE$\r\n
1058 s> Date: $HTTP_DATE$\r\n
1056 s> Content-Type: application/mercurial-cbor\r\n
1059 s> Content-Type: application/mercurial-cbor\r\n
1057 s> Content-Length: 2252\r\n
1060 s> Content-Length: 2252\r\n
1058 s> \r\n
1061 s> \r\n
1059 s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\x
d9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xd3batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
1062 s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\x
d9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xd3batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
1060 (remote redirect target target-bad-tls requires unsupported TLS versions: 39, 42)
1063 (remote redirect target target-bad-tls requires unsupported TLS versions: 39, 42)
1061 sending capabilities command
1064 sending capabilities command
1062 s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
1065 s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
1063 s> Accept-Encoding: identity\r\n
1066 s> Accept-Encoding: identity\r\n
1064 s> accept: application/mercurial-exp-framing-0006\r\n
1067 s> accept: application/mercurial-exp-framing-0006\r\n
1065 s> content-type: application/mercurial-exp-framing-0006\r\n
1068 s> content-type: application/mercurial-exp-framing-0006\r\n
1066 s> content-length: 102\r\n
1069 s> content-length: 102\r\n
1067 s> host: $LOCALIP:$HGPORT\r\n (glob)
1070 s> host: $LOCALIP:$HGPORT\r\n (glob)
1068 s> user-agent: Mercurial debugwireproto\r\n
1071 s> user-agent: Mercurial debugwireproto\r\n
1069 s> \r\n
1072 s> \r\n
1070 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81Hidentity:\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x80
1073 s> \x1c\x00\x00\x01\x00\x01\x01\x82\xa1Pcontentencodings\x81Hidentity:\x00\x00\x01\x00\x01\x00\x11\xa2DnameLcapabilitiesHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x80
1071 s> makefile('rb', None)
1074 s> makefile('rb', None)
1072 s> HTTP/1.1 200 OK\r\n
1075 s> HTTP/1.1 200 OK\r\n
1073 s> Server: testing stub value\r\n
1076 s> Server: testing stub value\r\n
1074 s> Date: $HTTP_DATE$\r\n
1077 s> Date: $HTTP_DATE$\r\n
1075 s> Content-Type: application/mercurial-exp-framing-0006\r\n
1078 s> Content-Type: application/mercurial-exp-framing-0006\r\n
1076 s> Transfer-Encoding: chunked\r\n
1079 s> Transfer-Encoding: chunked\r\n
1077 s> \r\n
1080 s> \r\n
1078 s> 11\r\n
1081 s> 11\r\n
1079 s> \t\x00\x00\x01\x00\x02\x01\x92
1082 s> \t\x00\x00\x01\x00\x02\x01\x92
1080 s> Hidentity
1083 s> Hidentity
1081 s> \r\n
1084 s> \r\n
1082 received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
1085 received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
1083 s> 13\r\n
1086 s> 13\r\n
1084 s> \x0b\x00\x00\x01\x00\x02\x041
1087 s> \x0b\x00\x00\x01\x00\x02\x041
1085 s> \xa1FstatusBok
1088 s> \xa1FstatusBok
1086 s> \r\n
1089 s> \r\n
1087 received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
1090 received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
1088 s> 6ca\r\n
1091 s> 6ca\r\n
1089 s> \xc2\x06\x00\x01\x00\x02\x041
1092 s> \xc2\x06\x00\x01\x00\x02\x041
1090 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepofo
rmats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/
1093 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepofo
rmats\x82LgeneraldeltaHrevlogv1Hredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/
1091 s> \r\n
1094 s> \r\n
1092 received frame(size=1730; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
1095 received frame(size=1730; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
1093 s> 8\r\n
1096 s> 8\r\n
1094 s> \x00\x00\x00\x01\x00\x02\x002
1097 s> \x00\x00\x00\x01\x00\x02\x002
1095 s> \r\n
1098 s> \r\n
1096 s> 0\r\n
1099 s> 0\r\n
1097 s> \r\n
1100 s> \r\n
1098 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
1101 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
1099 response: gen[
1102 response: gen[
1100 {
1103 {
1101 b'commands': {
1104 b'commands': {
1102 b'branchmap': {
1105 b'branchmap': {
1103 b'args': {},
1106 b'args': {},
1104 b'permissions': [
1107 b'permissions': [
1105 b'pull'
1108 b'pull'
1106 ]
1109 ]
1107 },
1110 },
1108 b'capabilities': {
1111 b'capabilities': {
1109 b'args': {},
1112 b'args': {},
1110 b'permissions': [
1113 b'permissions': [
1111 b'pull'
1114 b'pull'
1112 ]
1115 ]
1113 },
1116 },
1114 b'changesetdata': {
1117 b'changesetdata': {
1115 b'args': {
1118 b'args': {
1116 b'fields': {
1119 b'fields': {
1117 b'default': set([]),
1120 b'default': set([]),
1118 b'required': False,
1121 b'required': False,
1119 b'type': b'set',
1122 b'type': b'set',
1120 b'validvalues': set([
1123 b'validvalues': set([
1121 b'bookmarks',
1124 b'bookmarks',
1122 b'parents',
1125 b'parents',
1123 b'phase',
1126 b'phase',
1124 b'revision'
1127 b'revision'
1125 ])
1128 ])
1126 },
1129 },
1127 b'revisions': {
1130 b'revisions': {
1128 b'required': True,
1131 b'required': True,
1129 b'type': b'list'
1132 b'type': b'list'
1130 }
1133 }
1131 },
1134 },
1132 b'permissions': [
1135 b'permissions': [
1133 b'pull'
1136 b'pull'
1134 ]
1137 ]
1135 },
1138 },
1136 b'filedata': {
1139 b'filedata': {
1137 b'args': {
1140 b'args': {
1138 b'fields': {
1141 b'fields': {
1139 b'default': set([]),
1142 b'default': set([]),
1140 b'required': False,
1143 b'required': False,
1141 b'type': b'set',
1144 b'type': b'set',
1142 b'validvalues': set([
1145 b'validvalues': set([
1143 b'linknode',
1146 b'linknode',
1144 b'parents',
1147 b'parents',
1145 b'revision'
1148 b'revision'
1146 ])
1149 ])
1147 },
1150 },
1148 b'haveparents': {
1151 b'haveparents': {
1149 b'default': False,
1152 b'default': False,
1150 b'required': False,
1153 b'required': False,
1151 b'type': b'bool'
1154 b'type': b'bool'
1152 },
1155 },
1153 b'nodes': {
1156 b'nodes': {
1154 b'required': True,
1157 b'required': True,
1155 b'type': b'list'
1158 b'type': b'list'
1156 },
1159 },
1157 b'path': {
1160 b'path': {
1158 b'required': True,
1161 b'required': True,
1159 b'type': b'bytes'
1162 b'type': b'bytes'
1160 }
1163 }
1161 },
1164 },
1162 b'permissions': [
1165 b'permissions': [
1163 b'pull'
1166 b'pull'
1164 ]
1167 ]
1165 },
1168 },
1166 b'filesdata': {
1169 b'filesdata': {
1167 b'args': {
1170 b'args': {
1168 b'fields': {
1171 b'fields': {
1169 b'default': set([]),
1172 b'default': set([]),
1170 b'required': False,
1173 b'required': False,
1171 b'type': b'set',
1174 b'type': b'set',
1172 b'validvalues': set([
1175 b'validvalues': set([
1173 b'firstchangeset',
1176 b'firstchangeset',
1174 b'linknode',
1177 b'linknode',
1175 b'parents',
1178 b'parents',
1176 b'revision'
1179 b'revision'
1177 ])
1180 ])
1178 },
1181 },
1179 b'haveparents': {
1182 b'haveparents': {
1180 b'default': False,
1183 b'default': False,
1181 b'required': False,
1184 b'required': False,
1182 b'type': b'bool'
1185 b'type': b'bool'
1183 },
1186 },
1184 b'pathfilter': {
1187 b'pathfilter': {
1185 b'default': None,
1188 b'default': None,
1186 b'required': False,
1189 b'required': False,
1187 b'type': b'dict'
1190 b'type': b'dict'
1188 },
1191 },
1189 b'revisions': {
1192 b'revisions': {
1190 b'required': True,
1193 b'required': True,
1191 b'type': b'list'
1194 b'type': b'list'
1192 }
1195 }
1193 },
1196 },
1194 b'permissions': [
1197 b'permissions': [
1195 b'pull'
1198 b'pull'
1196 ],
1199 ],
1197 b'recommendedbatchsize': 50000
1200 b'recommendedbatchsize': 50000
1198 },
1201 },
1199 b'heads': {
1202 b'heads': {
1200 b'args': {
1203 b'args': {
1201 b'publiconly': {
1204 b'publiconly': {
1202 b'default': False,
1205 b'default': False,
1203 b'required': False,
1206 b'required': False,
1204 b'type': b'bool'
1207 b'type': b'bool'
1205 }
1208 }
1206 },
1209 },
1207 b'permissions': [
1210 b'permissions': [
1208 b'pull'
1211 b'pull'
1209 ]
1212 ]
1210 },
1213 },
1211 b'known': {
1214 b'known': {
1212 b'args': {
1215 b'args': {
1213 b'nodes': {
1216 b'nodes': {
1214 b'default': [],
1217 b'default': [],
1215 b'required': False,
1218 b'required': False,
1216 b'type': b'list'
1219 b'type': b'list'
1217 }
1220 }
1218 },
1221 },
1219 b'permissions': [
1222 b'permissions': [
1220 b'pull'
1223 b'pull'
1221 ]
1224 ]
1222 },
1225 },
1223 b'listkeys': {
1226 b'listkeys': {
1224 b'args': {
1227 b'args': {
1225 b'namespace': {
1228 b'namespace': {
1226 b'required': True,
1229 b'required': True,
1227 b'type': b'bytes'
1230 b'type': b'bytes'
1228 }
1231 }
1229 },
1232 },
1230 b'permissions': [
1233 b'permissions': [
1231 b'pull'
1234 b'pull'
1232 ]
1235 ]
1233 },
1236 },
1234 b'lookup': {
1237 b'lookup': {
1235 b'args': {
1238 b'args': {
1236 b'key': {
1239 b'key': {
1237 b'required': True,
1240 b'required': True,
1238 b'type': b'bytes'
1241 b'type': b'bytes'
1239 }
1242 }
1240 },
1243 },
1241 b'permissions': [
1244 b'permissions': [
1242 b'pull'
1245 b'pull'
1243 ]
1246 ]
1244 },
1247 },
1245 b'manifestdata': {
1248 b'manifestdata': {
1246 b'args': {
1249 b'args': {
1247 b'fields': {
1250 b'fields': {
1248 b'default': set([]),
1251 b'default': set([]),
1249 b'required': False,
1252 b'required': False,
1250 b'type': b'set',
1253 b'type': b'set',
1251 b'validvalues': set([
1254 b'validvalues': set([
1252 b'parents',
1255 b'parents',
1253 b'revision'
1256 b'revision'
1254 ])
1257 ])
1255 },
1258 },
1256 b'haveparents': {
1259 b'haveparents': {
1257 b'default': False,
1260 b'default': False,
1258 b'required': False,
1261 b'required': False,
1259 b'type': b'bool'
1262 b'type': b'bool'
1260 },
1263 },
1261 b'nodes': {
1264 b'nodes': {
1262 b'required': True,
1265 b'required': True,
1263 b'type': b'list'
1266 b'type': b'list'
1264 },
1267 },
1265 b'tree': {
1268 b'tree': {
1266 b'required': True,
1269 b'required': True,
1267 b'type': b'bytes'
1270 b'type': b'bytes'
1268 }
1271 }
1269 },
1272 },
1270 b'permissions': [
1273 b'permissions': [
1271 b'pull'
1274 b'pull'
1272 ],
1275 ],
1273 b'recommendedbatchsize': 100000
1276 b'recommendedbatchsize': 100000
1274 },
1277 },
1275 b'pushkey': {
1278 b'pushkey': {
1276 b'args': {
1279 b'args': {
1277 b'key': {
1280 b'key': {
1278 b'required': True,
1281 b'required': True,
1279 b'type': b'bytes'
1282 b'type': b'bytes'
1280 },
1283 },
1281 b'namespace': {
1284 b'namespace': {
1282 b'required': True,
1285 b'required': True,
1283 b'type': b'bytes'
1286 b'type': b'bytes'
1284 },
1287 },
1285 b'new': {
1288 b'new': {
1286 b'required': True,
1289 b'required': True,
1287 b'type': b'bytes'
1290 b'type': b'bytes'
1288 },
1291 },
1289 b'old': {
1292 b'old': {
1290 b'required': True,
1293 b'required': True,
1291 b'type': b'bytes'
1294 b'type': b'bytes'
1292 }
1295 }
1293 },
1296 },
1294 b'permissions': [
1297 b'permissions': [
1295 b'push'
1298 b'push'
1296 ]
1299 ]
1297 },
1300 },
1298 b'rawstorefiledata': {
1301 b'rawstorefiledata': {
1299 b'args': {
1302 b'args': {
1300 b'files': {
1303 b'files': {
1301 b'required': True,
1304 b'required': True,
1302 b'type': b'list'
1305 b'type': b'list'
1303 },
1306 },
1304 b'pathfilter': {
1307 b'pathfilter': {
1305 b'default': None,
1308 b'default': None,
1306 b'required': False,
1309 b'required': False,
1307 b'type': b'list'
1310 b'type': b'list'
1308 }
1311 }
1309 },
1312 },
1310 b'permissions': [
1313 b'permissions': [
1311 b'pull'
1314 b'pull'
1312 ]
1315 ]
1313 }
1316 }
1314 },
1317 },
1315 b'framingmediatypes': [
1318 b'framingmediatypes': [
1316 b'application/mercurial-exp-framing-0006'
1319 b'application/mercurial-exp-framing-0006'
1317 ],
1320 ],
1318 b'pathfilterprefixes': set([
1321 b'pathfilterprefixes': set([
1319 b'path:',
1322 b'path:',
1320 b'rootfilesin:'
1323 b'rootfilesin:'
1321 ]),
1324 ]),
1322 b'rawrepoformats': [
1325 b'rawrepoformats': [
1323 b'generaldelta',
1326 b'generaldelta',
1324 b'revlogv1'
1327 b'revlogv1'
1325 ],
1328 ],
1326 b'redirect': {
1329 b'redirect': {
1327 b'hashes': [
1330 b'hashes': [
1328 b'sha256',
1331 b'sha256',
1329 b'sha1'
1332 b'sha1'
1330 ],
1333 ],
1331 b'targets': [
1334 b'targets': [
1332 {
1335 {
1333 b'name': b'target-bad-tls',
1336 b'name': b'target-bad-tls',
1334 b'protocol': b'https',
1337 b'protocol': b'https',
1335 b'tlsversions': [
1338 b'tlsversions': [
1336 b'42',
1339 b'42',
1337 b'39'
1340 b'39'
1338 ],
1341 ],
1339 b'uris': [
1342 b'uris': [
1340 b'https://example.com/'
1343 b'https://example.com/'
1341 ]
1344 ]
1342 }
1345 }
1343 ]
1346 ]
1344 }
1347 }
1345 }
1348 }
1346 ]
1349 ]
1347 (sent 2 HTTP requests and * bytes; received * bytes in responses) (glob)
1350 (sent 2 HTTP requests and * bytes; received * bytes in responses) (glob)
1348
1351
1349 Set up the server to issue content redirects to its built-in API server.
1352 Set up the server to issue content redirects to its built-in API server.
1350
1353
1351 $ cat > redirects.py << EOF
1354 $ cat > redirects.py << EOF
1352 > [
1355 > [
1353 > {
1356 > {
1354 > b'name': b'local',
1357 > b'name': b'local',
1355 > b'protocol': b'http',
1358 > b'protocol': b'http',
1356 > b'uris': [b'http://example.com/'],
1359 > b'uris': [b'http://example.com/'],
1357 > },
1360 > },
1358 > ]
1361 > ]
1359 > EOF
1362 > EOF
1360
1363
1361 Request to eventual cache URL should return 404 (validating the cache server works)
1364 Request to eventual cache URL should return 404 (validating the cache server works)
1362
1365
1363 $ sendhttpraw << EOF
1366 $ sendhttpraw << EOF
1364 > httprequest GET api/simplecache/missingkey
1367 > httprequest GET api/simplecache/missingkey
1365 > user-agent: test
1368 > user-agent: test
1366 > EOF
1369 > EOF
1367 using raw connection to peer
1370 using raw connection to peer
1368 s> GET /api/simplecache/missingkey HTTP/1.1\r\n
1371 s> GET /api/simplecache/missingkey HTTP/1.1\r\n
1369 s> Accept-Encoding: identity\r\n
1372 s> Accept-Encoding: identity\r\n
1370 s> user-agent: test\r\n
1373 s> user-agent: test\r\n
1371 s> host: $LOCALIP:$HGPORT\r\n (glob)
1374 s> host: $LOCALIP:$HGPORT\r\n (glob)
1372 s> \r\n
1375 s> \r\n
1373 s> makefile('rb', None)
1376 s> makefile('rb', None)
1374 s> HTTP/1.1 404 Not Found\r\n
1377 s> HTTP/1.1 404 Not Found\r\n
1375 s> Server: testing stub value\r\n
1378 s> Server: testing stub value\r\n
1376 s> Date: $HTTP_DATE$\r\n
1379 s> Date: $HTTP_DATE$\r\n
1377 s> Content-Type: text/plain\r\n
1380 s> Content-Type: text/plain\r\n
1378 s> Content-Length: 22\r\n
1381 s> Content-Length: 22\r\n
1379 s> \r\n
1382 s> \r\n
1380 s> key not found in cache
1383 s> key not found in cache
1381
1384
1382 Send a cacheable request
1385 Send a cacheable request
1383
1386
1384 $ sendhttpv2peer << EOF
1387 $ sendhttpv2peer << EOF
1385 > command manifestdata
1388 > command manifestdata
1386 > nodes eval:[b'\x99\x2f\x47\x79\x02\x9a\x3d\xf8\xd0\x66\x6d\x00\xbb\x92\x4f\x69\x63\x4e\x26\x41']
1389 > nodes eval:[b'\x99\x2f\x47\x79\x02\x9a\x3d\xf8\xd0\x66\x6d\x00\xbb\x92\x4f\x69\x63\x4e\x26\x41']
1387 > tree eval:b''
1390 > tree eval:b''
1388 > fields eval:[b'parents']
1391 > fields eval:[b'parents']
1389 > EOF
1392 > EOF
1390 creating http peer for wire protocol version 2
1393 creating http peer for wire protocol version 2
1391 sending manifestdata command
1394 sending manifestdata command
1392 response: gen[
1395 response: gen[
1393 {
1396 {
1394 b'totalitems': 1
1397 b'totalitems': 1
1395 },
1398 },
1396 {
1399 {
1397 b'node': b'\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A',
1400 b'node': b'\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A',
1398 b'parents': [
1401 b'parents': [
1399 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
1402 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
1400 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
1403 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
1401 ]
1404 ]
1402 }
1405 }
1403 ]
1406 ]
1404
1407
1405 Cached entry should be available on server
1408 Cached entry should be available on server
1406
1409
1407 $ sendhttpraw << EOF
1410 $ sendhttpraw << EOF
1408 > httprequest GET api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c
1411 > httprequest GET api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c
1409 > user-agent: test
1412 > user-agent: test
1410 > EOF
1413 > EOF
1411 using raw connection to peer
1414 using raw connection to peer
1412 s> GET /api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c HTTP/1.1\r\n
1415 s> GET /api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c HTTP/1.1\r\n
1413 s> Accept-Encoding: identity\r\n
1416 s> Accept-Encoding: identity\r\n
1414 s> user-agent: test\r\n
1417 s> user-agent: test\r\n
1415 s> host: $LOCALIP:$HGPORT\r\n (glob)
1418 s> host: $LOCALIP:$HGPORT\r\n (glob)
1416 s> \r\n
1419 s> \r\n
1417 s> makefile('rb', None)
1420 s> makefile('rb', None)
1418 s> HTTP/1.1 200 OK\r\n
1421 s> HTTP/1.1 200 OK\r\n
1419 s> Server: testing stub value\r\n
1422 s> Server: testing stub value\r\n
1420 s> Date: $HTTP_DATE$\r\n
1423 s> Date: $HTTP_DATE$\r\n
1421 s> Content-Type: application/mercurial-cbor\r\n
1424 s> Content-Type: application/mercurial-cbor\r\n
1422 s> Content-Length: 91\r\n
1425 s> Content-Length: 91\r\n
1423 s> \r\n
1426 s> \r\n
1424 s> \xa1Jtotalitems\x01\xa2DnodeT\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&AGparents\x82T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
1427 s> \xa1Jtotalitems\x01\xa2DnodeT\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&AGparents\x82T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
1425 cbor> [
1428 cbor> [
1426 {
1429 {
1427 b'totalitems': 1
1430 b'totalitems': 1
1428 },
1431 },
1429 {
1432 {
1430 b'node': b'\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A',
1433 b'node': b'\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A',
1431 b'parents': [
1434 b'parents': [
1432 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
1435 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
1433 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
1436 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
1434 ]
1437 ]
1435 }
1438 }
1436 ]
1439 ]
1437
1440
1438 2nd request should result in content redirect response
1441 2nd request should result in content redirect response
1439
1442
1440 $ sendhttpv2peer << EOF
1443 $ sendhttpv2peer << EOF
1441 > command manifestdata
1444 > command manifestdata
1442 > nodes eval:[b'\x99\x2f\x47\x79\x02\x9a\x3d\xf8\xd0\x66\x6d\x00\xbb\x92\x4f\x69\x63\x4e\x26\x41']
1445 > nodes eval:[b'\x99\x2f\x47\x79\x02\x9a\x3d\xf8\xd0\x66\x6d\x00\xbb\x92\x4f\x69\x63\x4e\x26\x41']
1443 > tree eval:b''
1446 > tree eval:b''
1444 > fields eval:[b'parents']
1447 > fields eval:[b'parents']
1445 > EOF
1448 > EOF
1446 creating http peer for wire protocol version 2
1449 creating http peer for wire protocol version 2
1447 sending manifestdata command
1450 sending manifestdata command
1448 response: gen[
1451 response: gen[
1449 {
1452 {
1450 b'totalitems': 1
1453 b'totalitems': 1
1451 },
1454 },
1452 {
1455 {
1453 b'node': b'\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A',
1456 b'node': b'\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A',
1454 b'parents': [
1457 b'parents': [
1455 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
1458 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
1456 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
1459 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
1457 ]
1460 ]
1458 }
1461 }
1459 ]
1462 ]
1460
1463
1461 $ cat error.log
1464 $ cat error.log
1462 $ killdaemons.py
1465 $ killdaemons.py
1463
1466
1464 $ cat .hg/blackbox.log
1467 $ cat .hg/blackbox.log
1465 *> cacher constructed for manifestdata (glob)
1468 *> cacher constructed for manifestdata (glob)
1466 *> cache miss for 47abb8efa5f01b8964d74917793ad2464db0fa2c (glob)
1469 *> cache miss for 47abb8efa5f01b8964d74917793ad2464db0fa2c (glob)
1467 *> storing cache entry for 47abb8efa5f01b8964d74917793ad2464db0fa2c (glob)
1470 *> storing cache entry for 47abb8efa5f01b8964d74917793ad2464db0fa2c (glob)
1468 *> cacher constructed for manifestdata (glob)
1471 *> cacher constructed for manifestdata (glob)
1469 *> cache hit for 47abb8efa5f01b8964d74917793ad2464db0fa2c (glob)
1472 *> cache hit for 47abb8efa5f01b8964d74917793ad2464db0fa2c (glob)
1470 *> sending content redirect for 47abb8efa5f01b8964d74917793ad2464db0fa2c to http://*:$HGPORT/api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c (glob)
1473 *> sending content redirect for 47abb8efa5f01b8964d74917793ad2464db0fa2c to http://*:$HGPORT/api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c (glob)
General Comments 0
You need to be logged in to leave comments. Login now