##// END OF EJS Templates
merge with stable
Augie Fackler -
r41828:91701785 merge default
parent child Browse files
Show More
@@ -1,38 +1,38 b''
# Extract version number into 4 parts, some of which may be empty:
#
# version: the numeric part of the most recent tag. Will always look like 1.3.
#
# type: if an rc build, "rc", otherwise empty
#
# distance: the distance from the nearest tag, or empty if built from a tag
#
# node: the node|short hg was built from, or empty if built from a tag
gethgversion() {
    # neutralize user config so 'hg version' output is predictable
    export HGRCPATH=
    export HGPLAIN=

    make cleanbutpackages
    make local PURE=--pure
    HG="$PWD/hg"

    "$HG" version > /dev/null || { echo 'abort: hg version failed!'; exit 1 ; }

    # e.g. "4.9rc0+12-abcdef123456" -> hgversion
    hgversion=`LANGUAGE=C "$HG" version | sed -ne 's/.*(version \(.*\))$/\1/p'`

    # a '+' means we are some distance past a tag: split off "distance-node"
    if echo $hgversion | grep + > /dev/null 2>&1 ; then
        tmp=`echo $hgversion | cut -d+ -f 2`
        hgversion=`echo $hgversion | cut -d+ -f 1`
        distance=`echo $tmp | cut -d- -f 1`
        node=`echo $tmp | cut -d- -f 2`
    else
        distance=''
        node=''
    fi
    # detect an rc tag like "4.9rc0" or "4.9.1rc1" (matching the version
    # shape explicitly, rather than any '-', avoids false positives)
    if echo $hgversion | grep -E -- '[0-9]\.[0-9](\.[0-9])?rc' > /dev/null 2>&1; then
        version=`echo $hgversion | cut -d'r' -f1`
        type="rc`echo $hgversion | cut -d'c' -f2-`"
    else
        version=$hgversion
        type=''
    fi
}
@@ -1,1838 +1,1838 b''
1 # subrepo.py - sub-repository classes and factory
1 # subrepo.py - sub-repository classes and factory
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import stat
15 import stat
16 import subprocess
16 import subprocess
17 import sys
17 import sys
18 import tarfile
18 import tarfile
19 import xml.dom.minidom
19 import xml.dom.minidom
20
20
21 from .i18n import _
21 from .i18n import _
22 from . import (
22 from . import (
23 cmdutil,
23 cmdutil,
24 encoding,
24 encoding,
25 error,
25 error,
26 exchange,
26 exchange,
27 logcmdutil,
27 logcmdutil,
28 match as matchmod,
28 match as matchmod,
29 node,
29 node,
30 pathutil,
30 pathutil,
31 phases,
31 phases,
32 pycompat,
32 pycompat,
33 scmutil,
33 scmutil,
34 subrepoutil,
34 subrepoutil,
35 util,
35 util,
36 vfs as vfsmod,
36 vfs as vfsmod,
37 )
37 )
38 from .utils import (
38 from .utils import (
39 dateutil,
39 dateutil,
40 procutil,
40 procutil,
41 stringutil,
41 stringutil,
42 )
42 )
43
43
# Delayed import to break a circular dependency; set by subrepo() and
# nullsubrepo() below before any hgsubrepo is constructed.
hg = None

# Convenience aliases for helpers that live in subrepoutil/util.
reporelpath = subrepoutil.reporelpath
subrelpath = subrepoutil.subrelpath
_abssource = subrepoutil._abssource
propertycache = util.propertycache
49
49
def _expandedabspath(path):
    """Expand *path* and return it as a normalized absolute path.

    If the value is a URL with a scheme (e.g. http://...), the original
    argument is returned unchanged.
    """
    expanded = util.urllocalpath(util.expandpath(path))
    url = util.url(expanded)
    if url.scheme:
        return path
    return util.normpath(os.path.abspath(url.path))
59
59
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    digest = hashlib.sha1(_expandedabspath(remotepath)).digest()
    return node.hex(digest)[:12]
63
63
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""

    def __init__(self, *args, **kw):
        # strip our private keywords before delegating to error.Abort
        self.subrepo = kw.pop(r'subrepo', None)
        self.cause = kw.pop(r'cause', None)
        super(SubrepoAbort, self).__init__(*args, **kw)
70
70
def annotatesubrepoerror(func):
    """Decorator: annotate any error.Abort raised by *func* with the
    subrepo path, wrapping it in SubrepoAbort exactly once."""
    def decoratedmethod(self, *args, **kargs):
        try:
            return func(self, *args, **kargs)
        except SubrepoAbort:
            # This exception has already been handled
            raise
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = (stringutil.forcebytestr(ex) + ' '
                        + _('(in subrepository "%s")') % subrepo)
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
    return decoratedmethod
87
87
def _updateprompt(ui, sub, dirty, local, remote):
    """Ask the user to pick the (l)ocal or (r)emote subrepo source;
    returns the promptchoice index (0 = local)."""
    if dirty:
        template = _(' subrepository sources for %s differ\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    else:
        template = _(' subrepository sources for %s differ (in checked out '
                     'version)\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    return ui.promptchoice(template % (subrelpath(sub), local, remote), 0)
101
101
def _sanitize(ui, vfs, ignore):
    # Walk the subrepo working copy and delete any 'hgrc' found inside a
    # '.hg' directory, warning the user: such a file could carry hostile
    # settings. Directories whose lowercased name equals *ignore* are
    # pruned from the walk.
    for dirname, dirs, names in vfs.walk():
        for i, d in enumerate(dirs):
            if d.lower() == ignore:
                # prune this subtree: os.walk-style traversal honors
                # in-place mutation of the dirs list
                del dirs[i]
                break
        if vfs.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % vfs.join(dirname))
                vfs.unlink(vfs.reljoin(dirname, f))
115
115
def _auditsubrepopath(repo, path):
    """Abort if *path* is an unsafe location for a subrepository."""
    # sanity check for potentially unsafe paths such as '~' and '$FOO'
    if path.startswith('~') or '$' in path or util.expandpath(path) != path:
        raise error.Abort(_('subrepo path contains illegal component: %s')
                          % path)
    # the auditor validates each component but not the path itself being
    # a symlink, so check that separately
    pathutil.pathauditor(repo.root)(path)
    if repo.wvfs.islink(path):
        raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
125
125
# Per-kind default for the 'subrepos.<kind>:allowed' config knob: only
# Mercurial subrepos are trusted out of the box; git/svn must be opted in.
SUBREPO_ALLOWED_DEFAULTS = {
    'hg': True,
    'git': False,
    'svn': False,
}
131
131
def _checktype(ui, kind):
    """Abort unless subrepos of *kind* are both enabled and known."""
    # 'subrepos.allowed' is a master kill switch: if disabled, subrepos
    # are disabled period.
    if not ui.configbool('subrepos', 'allowed', True):
        raise error.Abort(_('subrepos not enabled'),
                          hint=_("see 'hg help config.subrepos' for details"))

    # each kind also has its own opt-in knob; non-hg kinds default to off
    if not ui.configbool('subrepos', '%s:allowed' % kind,
                         SUBREPO_ALLOWED_DEFAULTS.get(kind, False)):
        raise error.Abort(_('%s subrepos not allowed') % kind,
                          hint=_("see 'hg help config.subrepos' for details"))

    if kind not in types:
        raise error.Abort(_('unknown subrepo type %s') % kind)
146
146
def subrepo(ctx, path, allowwdir=False, allowcreate=True):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules because it
    # wants to make repo objects from deep inside the stack, so we
    # manually delay the circular import to not break scripts that
    # don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    repo = ctx.repo()
    _auditsubrepopath(repo, path)
    src, rev, kind = ctx.substate[path]
    _checktype(repo.ui, kind)
    if allowwdir:
        rev = ctx.subrev(path)
    return types[kind](ctx, path, (src, rev), allowcreate)
164
164
def nullsubrepo(ctx, path, pctx):
    """return an empty subrepo in pctx for the extant subrepo in ctx"""
    # subrepo inherently violates our import layering rules because it
    # wants to make repo objects from deep inside the stack, so we
    # manually delay the circular import to not break scripts that
    # don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    repo = ctx.repo()
    _auditsubrepopath(repo, path)
    src, unused_rev, kind = ctx.substate[path]
    _checktype(repo.ui, kind)
    # an "empty" hg subrepo is pinned to the null revision (40 zeros)
    subrev = "0" * 40 if kind == 'hg' else ''
    return types[kind](pctx, path, (src, subrev), True)
183
183
184 # subrepo classes need to implement the following abstract class:
184 # subrepo classes need to implement the following abstract class:
185
185
class abstractsubrepo(object):
    """Interface that concrete subrepo types (hg, git, svn) implement.

    Methods raising NotImplementedError are mandatory for subclasses;
    the remaining defaults are safe no-ops or "unsupported" results.
    """

    def __init__(self, ctx, path):
        """Initialize abstractsubrepo part

        ``ctx`` is the context referring this subrepository in the
        parent repository.

        ``path`` is the path to this subrepository as seen from
        innermost repository.
        """
        self.ui = ctx.repo().ui
        self._ctx = ctx
        self._path = path

    def addwebdirpath(self, serverpath, webconf):
        """Add the hgwebdir entries for this subrepo, and any of its subrepos.

        ``serverpath`` is the path component of the URL for this repo.

        ``webconf`` is the dictionary of hgwebdir entries.
        """
        pass

    def storeclean(self, path):
        """
        returns true if the repository has not changed since it was last
        cloned from or pushed to a given repository.
        """
        return False

    def dirty(self, ignoreupdate=False, missing=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate. If missing
        is true, check for deleted files.
        """
        raise NotImplementedError

    def dirtyreason(self, ignoreupdate=False, missing=False):
        """return reason string if it is ``dirty()``

        Returned string should have enough information for the message
        of exception.

        This returns None, otherwise.
        """
        if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
            return _('uncommitted changes in subrepository "%s"'
                     ) % subrelpath(self)

    def bailifchanged(self, ignoreupdate=False, hint=None):
        """raise Abort if subrepository is ``dirty()``
        """
        dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
                                       missing=True)
        if dirtyreason:
            raise error.Abort(dirtyreason, hint=hint)

    def basestate(self):
        """current working directory base state, disregarding .hgsubstate
        state and working directory modifications"""
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def phase(self, state):
        """returns phase of specified state in the subrepository.
        """
        return phases.public

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, opts):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
        # adding files is a no-op by default; returns the list of added files
        return []

    def addremove(self, matcher, prefix, uipathfn, opts):
        # 1 signals a warning to the caller (same convention as cmdutil)
        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
        return 1

    def cat(self, match, fm, fntemplate, prefix, **opts):
        return 1

    def status(self, rev2, **opts):
        # empty status tuple for subrepo types that cannot report one
        return scmutil.status([], [], [], [], [], [], [])

    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        pass

    def outgoing(self, ui, dest, opts):
        return 1

    def incoming(self, ui, source, opts):
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name, decode):
        """return file data, optionally passed through repo decoders"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return ''

    def matchfileset(self, expr, badfn=None):
        """Resolve the fileset expression for this repo"""
        return matchmod.never(badfn=badfn)

    def printfiles(self, ui, m, fm, fmt, subrepos):
        """handle the files command for this subrepo"""
        return 1

    def archive(self, archiver, prefix, match=None, decode=True):
        # add every tracked file (optionally filtered by *match*) to the
        # archiver, reporting progress; returns the number of files added
        if match is not None:
            files = [f for f in self.files() if match(f)]
        else:
            files = self.files()
        total = len(files)
        relpath = subrelpath(self)
        progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
                                        unit=_('files'), total=total)
        progress.update(0)
        for name in files:
            flags = self.fileflags(name)
            # executable flag 'x' selects the archive mode bits
            mode = 'x' in flags and 0o755 or 0o644
            symlink = 'l' in flags
            archiver.addfile(prefix + name, mode, symlink,
                             self.filedata(name, decode))
            progress.increment()
        progress.complete()
        return total

    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''

    def forget(self, match, prefix, uipathfn, dryrun, interactive):
        # (forgotten, forced) file lists; nothing to forget by default
        return ([], [])

    def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos,
                    dryrun, warnings):
        """remove the matched files from the subrepository and the filesystem,
        possibly by force and/or after the file has been removed from the
        filesystem. Return 0 on success, 1 on any warning.
        """
        warnings.append(_("warning: removefiles not implemented (%s)")
                        % self._path)
        return 1

    def revert(self, substate, *pats, **opts):
        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
                     % (substate[0], substate[2]))
        return []

    def shortid(self, revid):
        return revid

    def unshare(self):
        '''
        convert this repository from shared to normal storage.
        '''

    def verify(self):
        '''verify the integrity of the repository. Return 0 on success or
        warning, 1 on any error.
        '''
        return 0

    @propertycache
    def wvfs(self):
        """return vfs to access the working directory of this subrepository
        """
        return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
401
401
402 class hgsubrepo(abstractsubrepo):
402 class hgsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        """Open (or create) the Mercurial subrepo at *path* within *ctx*'s
        repository.

        ``state`` is the (source, revision) pair recorded in .hgsubstate.
        The repository is created only when ``allowcreate`` is true and no
        .hg directory exists yet.
        """
        super(hgsubrepo, self).__init__(ctx, path)
        self._state = state
        r = ctx.repo()
        root = r.wjoin(util.localpath(path))
        create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
        # repository constructor does expand variables in path, which is
        # unsafe since subrepo path might come from untrusted source.
        if os.path.realpath(util.expandpath(root)) != root:
            raise error.Abort(_('subrepo path contains illegal component: %s')
                              % path)
        self._repo = hg.repository(r.baseui, root, create=create)
        # belt-and-suspenders: the constructor must not have expanded root
        if self._repo.root != root:
            raise error.ProgrammingError('failed to reject unsafe subrepo '
                                         'path: %s (expanded to %s)'
                                         % (root, self._repo.root))

        # Propagate the parent's --hidden option
        if r is r.unfiltered():
            self._repo = self._repo.unfiltered()

        self.ui = self._repo.ui
        # copy selected parent config values into the subrepo's ui
        for s, k in [('ui', 'commitsubrepos')]:
            v = r.ui.config(s, k)
            if v:
                self.ui.setconfig(s, k, v, 'subrepo')
        # internal config: ui._usedassubrepo
        self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
        self._initrepo(r, state[0], create)
432
432
    @annotatesubrepoerror
    def addwebdirpath(self, serverpath, webconf):
        # delegate to cmdutil, which also recurses into nested subrepos
        cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
436
436
    def storeclean(self, path):
        # take the repo lock so the store hash is computed against a
        # consistent snapshot of the store files
        with self._repo.lock():
            return self._storeclean(path)
440
440
441 def _storeclean(self, path):
441 def _storeclean(self, path):
442 clean = True
442 clean = True
443 itercache = self._calcstorehash(path)
443 itercache = self._calcstorehash(path)
444 for filehash in self._readstorehashcache(path):
444 for filehash in self._readstorehashcache(path):
445 if filehash != next(itercache, None):
445 if filehash != next(itercache, None):
446 clean = False
446 clean = False
447 break
447 break
448 if clean:
448 if clean:
449 # if not empty:
449 # if not empty:
450 # the cached and current pull states have a different size
450 # the cached and current pull states have a different size
451 clean = next(itercache, None) is None
451 clean = next(itercache, None) is None
452 return clean
452 return clean
453
453
454 def _calcstorehash(self, remotepath):
454 def _calcstorehash(self, remotepath):
455 '''calculate a unique "store hash"
455 '''calculate a unique "store hash"
456
456
457 This method is used to to detect when there are changes that may
457 This method is used to to detect when there are changes that may
458 require a push to a given remote path.'''
458 require a push to a given remote path.'''
459 # sort the files that will be hashed in increasing (likely) file size
459 # sort the files that will be hashed in increasing (likely) file size
460 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
460 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
461 yield '# %s\n' % _expandedabspath(remotepath)
461 yield '# %s\n' % _expandedabspath(remotepath)
462 vfs = self._repo.vfs
462 vfs = self._repo.vfs
463 for relname in filelist:
463 for relname in filelist:
464 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
464 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
465 yield '%s = %s\n' % (relname, filehash)
465 yield '%s = %s\n' % (relname, filehash)
466
466
    @propertycache
    def _cachestorehashvfs(self):
        # vfs rooted at .hg/cache/storehash, where per-remote hash files live
        return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
470
470
    def _readstorehashcache(self, remotepath):
        '''read the store hash cache for a given remote repository

        Returns the cached lines; tryreadlines presumably returns an empty
        sequence when no cache exists yet — TODO confirm.'''
        cachefile = _getstorehashcachename(remotepath)
        return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
475
475
    def _cachestorehash(self, remotepath):
        '''cache the current store hash

        Each remote repo requires its own store hash cache, because a subrepo
        store may be "clean" versus a given remote repo, but not versus another
        '''
        cachefile = _getstorehashcachename(remotepath)
        # hold the repo lock so the store cannot change between computing
        # the hash and writing it out
        with self._repo.lock():
            storehash = list(self._calcstorehash(remotepath))
            vfs = self._cachestorehashvfs
            vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
487
487
488 def _getctx(self):
488 def _getctx(self):
489 '''fetch the context for this subrepo revision, possibly a workingctx
489 '''fetch the context for this subrepo revision, possibly a workingctx
490 '''
490 '''
491 if self._ctx.rev() is None:
491 if self._ctx.rev() is None:
492 return self._repo[None] # workingctx if parent is workingctx
492 return self._repo[None] # workingctx if parent is workingctx
493 else:
493 else:
494 rev = self._state[1]
494 rev = self._state[1]
495 return self._repo[rev]
495 return self._repo[rev]
496
496
    @annotatesubrepoerror
    def _initrepo(self, parentrepo, source, create):
        # Wire this repo up as a subrepo of parentrepo and remember where it
        # came from.
        self._repo._subparent = parentrepo
        self._repo._subsource = source

        if create:
            # Seed the new subrepo's hgrc with default/default-push paths
            # derived from the parent repository.
            lines = ['[paths]\n']

            def addpathconfig(key, value):
                # skip empty values entirely (no line, no config)
                if value:
                    lines.append('%s = %s\n' % (key, value))
                    self.ui.setconfig('paths', key, value, 'subrepo')

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            # only record default-push when it actually differs from default
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)

            self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
517
517
518 @annotatesubrepoerror
518 @annotatesubrepoerror
519 def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
519 def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
520 return cmdutil.add(ui, self._repo, match, prefix, uipathfn,
520 return cmdutil.add(ui, self._repo, match, prefix, uipathfn,
521 explicitonly, **opts)
521 explicitonly, **opts)
522
522
523 @annotatesubrepoerror
523 @annotatesubrepoerror
524 def addremove(self, m, prefix, uipathfn, opts):
524 def addremove(self, m, prefix, uipathfn, opts):
525 # In the same way as sub directories are processed, once in a subrepo,
525 # In the same way as sub directories are processed, once in a subrepo,
526 # always entry any of its subrepos. Don't corrupt the options that will
526 # always entry any of its subrepos. Don't corrupt the options that will
527 # be used to process sibling subrepos however.
527 # be used to process sibling subrepos however.
528 opts = copy.copy(opts)
528 opts = copy.copy(opts)
529 opts['subrepos'] = True
529 opts['subrepos'] = True
530 return scmutil.addremove(self._repo, m, prefix, uipathfn, opts)
530 return scmutil.addremove(self._repo, m, prefix, uipathfn, opts)
531
531
532 @annotatesubrepoerror
532 @annotatesubrepoerror
533 def cat(self, match, fm, fntemplate, prefix, **opts):
533 def cat(self, match, fm, fntemplate, prefix, **opts):
534 rev = self._state[1]
534 rev = self._state[1]
535 ctx = self._repo[rev]
535 ctx = self._repo[rev]
536 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
536 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
537 prefix, **opts)
537 prefix, **opts)
538
538
    @annotatesubrepoerror
    def status(self, rev2, **opts):
        '''status between the recorded subrepo revision and rev2

        A revision that cannot be looked up only produces a warning; an
        all-empty status is returned in that case.'''
        try:
            rev1 = self._state[1]
            ctx1 = self._repo[rev1]
            ctx2 = self._repo[rev2]
            return self._repo.status(ctx1, ctx2, **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))
            # pretend nothing changed rather than aborting the parent command
            return scmutil.status([], [], [], [], [], [], [])
550
550
    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        '''print a diff between the recorded subrepo node and node2

        Lookup failures only warn; the subrepo diff is silently skipped.'''
        try:
            node1 = node.bin(self._state[1])
            # We currently expect node2 to come from substate and be
            # in hex format
            if node2 is not None:
                node2 = node.bin(node2)
            logcmdutil.diffordiffstat(ui, self._repo, diffopts, node1, node2,
                                      match, prefix=prefix, listsubrepos=True,
                                      **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))
565
565
    @annotatesubrepoerror
    def archive(self, archiver, prefix, match=None, decode=True):
        '''archive the pinned revision, recursing into nested subrepos

        Returns the total number of files archived, including those of
        nested subrepos.'''
        # make sure the working copy matches the recorded state first
        self._get(self._state + ('hg',))
        files = self.files()
        if match:
            files = [f for f in files if match(f)]
        rev = self._state[1]
        ctx = self._repo[rev]
        # prefetchfiles presumably warms file-content caches before the
        # archive walk (relevant for remote/lazy stores) — TODO confirm
        scmutil.prefetchfiles(self._repo, [ctx.rev()],
                              scmutil.matchfiles(self._repo, files))
        total = abstractsubrepo.archive(self, archiver, prefix, match)
        # recurse into any subrepos recorded at this revision
        for subpath in ctx.substate:
            s = subrepo(ctx, subpath, True)
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = prefix + subpath + '/'
            total += s.archive(archiver, subprefix, submatch,
                               decode)
        return total
584
584
585 @annotatesubrepoerror
585 @annotatesubrepoerror
586 def dirty(self, ignoreupdate=False, missing=False):
586 def dirty(self, ignoreupdate=False, missing=False):
587 r = self._state[1]
587 r = self._state[1]
588 if r == '' and not ignoreupdate: # no state recorded
588 if r == '' and not ignoreupdate: # no state recorded
589 return True
589 return True
590 w = self._repo[None]
590 w = self._repo[None]
591 if r != w.p1().hex() and not ignoreupdate:
591 if r != w.p1().hex() and not ignoreupdate:
592 # different version checked out
592 # different version checked out
593 return True
593 return True
594 return w.dirty(missing=missing) # working directory changed
594 return w.dirty(missing=missing) # working directory changed
595
595
596 def basestate(self):
596 def basestate(self):
597 return self._repo['.'].hex()
597 return self._repo['.'].hex()
598
598
599 def checknested(self, path):
599 def checknested(self, path):
600 return self._repo._checknested(self._repo.wjoin(path))
600 return self._repo._checknested(self._repo.wjoin(path))
601
601
    @annotatesubrepoerror
    def commit(self, text, user, date):
        '''commit the subrepo and return the hex node to record'''
        # don't bother committing in the subrepo if it's only been
        # updated
        if not self.dirty(True):
            return self._repo['.'].hex()
        self.ui.debug("committing subrepo %s\n" % subrelpath(self))
        n = self._repo.commit(text, user, date)
        if not n:
            # commit created no changeset; keep the current checkout's node
            return self._repo['.'].hex() # different version checked out
        return node.hex(n)
613
613
614 @annotatesubrepoerror
614 @annotatesubrepoerror
615 def phase(self, state):
615 def phase(self, state):
616 return self._repo[state or '.'].phase()
616 return self._repo[state or '.'].phase()
617
617
    @annotatesubrepoerror
    def remove(self):
        '''empty the subrepo working directory, keeping its history'''
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
        # update to the null revision, which clears the working copy
        hg.clean(self._repo, node.nullid, False)
624
624
    def _get(self, state):
        '''make sure the revision recorded in state is available locally,
        sharing, cloning or pulling as needed

        Returns True when the revision is already available and no remote
        interaction is required, False otherwise.'''
        source, revision, kind = state
        parentrepo = self._repo._subparent

        if revision in self._repo.unfiltered():
            # Allow shared subrepos tracked at null to setup the sharedpath
            if len(self._repo) != 0 or not parentrepo.shared():
                return True
        self._repo._subsource = source
        srcurl = _abssource(self._repo)

        # Defer creating the peer until after the status message is logged, in
        # case there are network problems.
        getpeer = lambda: hg.peer(self._repo, {}, srcurl)

        if len(self._repo) == 0:
            # empty local repo: replace it with a share or a fresh clone
            # use self._repo.vfs instead of self.wvfs to remove .hg only
            self._repo.vfs.rmtree()

            # A remote subrepo could be shared if there is a local copy
            # relative to the parent's share source. But clone pooling doesn't
            # assemble the repos in a tree, so that can't be consistently done.
            # A simpler option is for the user to configure clone pooling, and
            # work with that.
            if parentrepo.shared() and hg.islocal(srcurl):
                self.ui.status(_('sharing subrepo %s from %s\n')
                               % (subrelpath(self), srcurl))
                shared = hg.share(self._repo._subparent.baseui,
                                  getpeer(), self._repo.root,
                                  update=False, bookmarks=False)
                self._repo = shared.local()
            else:
                # TODO: find a common place for this and this code in the
                # share.py wrap of the clone command.
                if parentrepo.shared():
                    pool = self.ui.config('share', 'pool')
                    if pool:
                        pool = util.expandpath(pool)

                    shareopts = {
                        'pool': pool,
                        'mode': self.ui.config('share', 'poolnaming'),
                    }
                else:
                    shareopts = {}

                self.ui.status(_('cloning subrepo %s from %s\n')
                               % (subrelpath(self), util.hidepassword(srcurl)))
                other, cloned = hg.clone(self._repo._subparent.baseui, {},
                                         getpeer(), self._repo.root,
                                         update=False, shareopts=shareopts)
                self._repo = cloned.local()
            self._initrepo(parentrepo, source, create=True)
            # freshly cloned/shared, so the store matches the source
            self._cachestorehash(srcurl)
        else:
            # existing non-empty local repo: pull the missing revision
            self.ui.status(_('pulling subrepo %s from %s\n')
                           % (subrelpath(self), util.hidepassword(srcurl)))
            cleansub = self.storeclean(srcurl)
            exchange.pull(self._repo, getpeer())
            if cleansub:
                # keep the repo clean after pull
                self._cachestorehash(srcurl)
        return False
688
688
    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        '''update the subrepo working copy to the revision in state,
        fetching it first when necessary'''
        inrepo = self._get(state)
        source, revision, kind = state
        repo = self._repo
        repo.ui.debug("getting subrepo %s\n" % self._path)
        if inrepo:
            # the revision is already present locally; it may still be
            # hidden, in which case warn and update via the unfiltered repo
            urepo = repo.unfiltered()
            ctx = urepo[revision]
            if ctx.hidden():
                urepo.ui.warn(
                    _('revision %s in subrepository "%s" is hidden\n') \
                    % (revision[0:12], self._path))
                repo = urepo
        hg.updaterepo(repo, revision, overwrite)
704
704
    @annotatesubrepoerror
    def merge(self, state):
        '''merge the subrepo working copy with the revision in state'''
        self._get(state)
        cur = self._repo['.']
        dst = self._repo[state[1]]
        anc = dst.ancestor(cur)

        def mergefunc():
            # fast-forward update when dst descends from cur on the same
            # branch; no-op when cur already contains dst; otherwise a real
            # merge
            if anc == cur and dst.branch() == cur.branch():
                self.ui.debug('updating subrepository "%s"\n'
                              % subrelpath(self))
                hg.update(self._repo, state[1])
            elif anc == dst:
                self.ui.debug('skipping subrepository "%s"\n'
                              % subrelpath(self))
            else:
                self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
                hg.merge(self._repo, state[1], remind=False)

        wctx = self._repo[None]
        if self.dirty():
            if anc != dst:
                # dirty subrepo and a non-trivial target: let the user decide
                if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
                    mergefunc()
            else:
                mergefunc()
        else:
            mergefunc()
733
733
    @annotatesubrepoerror
    def push(self, opts):
        '''push the subrepo (and, recursively, its committed subrepos)

        Returns False when a nested push reports failure, None when the
        store is already clean versus the destination (and --force is not
        set), otherwise the push result's cgresult.'''
        force = opts.get('force')
        newbranch = opts.get('new_branch')
        ssh = opts.get('ssh')

        # push subrepos depth-first for coherent ordering
        c = self._repo['.']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if c.sub(s).push(opts) == 0:
                return False

        dsturl = _abssource(self._repo, True)
        if not force:
            if self.storeclean(dsturl):
                # nothing new since the last push to this destination
                self.ui.status(
                    _('no changes made to subrepo %s since last push to %s\n')
                    % (subrelpath(self), util.hidepassword(dsturl)))
                return None
        self.ui.status(_('pushing subrepo %s to %s\n') %
                       (subrelpath(self), util.hidepassword(dsturl)))
        other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
        res = exchange.push(self._repo, other, force, newbranch=newbranch)

        # the repo is now clean
        self._cachestorehash(dsturl)
        return res.cgresult
762
762
763 @annotatesubrepoerror
763 @annotatesubrepoerror
764 def outgoing(self, ui, dest, opts):
764 def outgoing(self, ui, dest, opts):
765 if 'rev' in opts or 'branch' in opts:
765 if 'rev' in opts or 'branch' in opts:
766 opts = copy.copy(opts)
766 opts = copy.copy(opts)
767 opts.pop('rev', None)
767 opts.pop('rev', None)
768 opts.pop('branch', None)
768 opts.pop('branch', None)
769 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
769 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
770
770
771 @annotatesubrepoerror
771 @annotatesubrepoerror
772 def incoming(self, ui, source, opts):
772 def incoming(self, ui, source, opts):
773 if 'rev' in opts or 'branch' in opts:
773 if 'rev' in opts or 'branch' in opts:
774 opts = copy.copy(opts)
774 opts = copy.copy(opts)
775 opts.pop('rev', None)
775 opts.pop('rev', None)
776 opts.pop('branch', None)
776 opts.pop('branch', None)
777 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
777 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
778
778
779 @annotatesubrepoerror
779 @annotatesubrepoerror
780 def files(self):
780 def files(self):
781 rev = self._state[1]
781 rev = self._state[1]
782 ctx = self._repo[rev]
782 ctx = self._repo[rev]
783 return ctx.manifest().keys()
783 return ctx.manifest().keys()
784
784
785 def filedata(self, name, decode):
785 def filedata(self, name, decode):
786 rev = self._state[1]
786 rev = self._state[1]
787 data = self._repo[rev][name].data()
787 data = self._repo[rev][name].data()
788 if decode:
788 if decode:
789 data = self._repo.wwritedata(name, data)
789 data = self._repo.wwritedata(name, data)
790 return data
790 return data
791
791
792 def fileflags(self, name):
792 def fileflags(self, name):
793 rev = self._state[1]
793 rev = self._state[1]
794 ctx = self._repo[rev]
794 ctx = self._repo[rev]
795 return ctx.flags(name)
795 return ctx.flags(name)
796
796
797 @annotatesubrepoerror
797 @annotatesubrepoerror
798 def printfiles(self, ui, m, fm, fmt, subrepos):
798 def printfiles(self, ui, m, fm, fmt, subrepos):
799 # If the parent context is a workingctx, use the workingctx here for
799 # If the parent context is a workingctx, use the workingctx here for
800 # consistency.
800 # consistency.
801 if self._ctx.rev() is None:
801 if self._ctx.rev() is None:
802 ctx = self._repo[None]
802 ctx = self._repo[None]
803 else:
803 else:
804 rev = self._state[1]
804 rev = self._state[1]
805 ctx = self._repo[rev]
805 ctx = self._repo[rev]
806 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
806 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
807
807
    @annotatesubrepoerror
    def matchfileset(self, expr, badfn=None):
        '''build a matcher for the fileset expr, unioned with the matchers
        of any nested subrepos'''
        # evaluate against the workingctx when the parent is a workingctx,
        # otherwise against the pinned revision
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]

        matchers = [ctx.matchfileset(expr, badfn=badfn)]

        for subpath in ctx.substate:
            sub = ctx.sub(subpath)

            try:
                sm = sub.matchfileset(expr, badfn=badfn)
                # rebase the sub-matcher under its directory prefix
                pm = matchmod.prefixdirmatcher(subpath, sm, badfn=badfn)
                matchers.append(pm)
            except error.LookupError:
                # a missing subrepo is skipped with a note, not an error
                self.ui.status(_("skipping missing subrepository: %s\n")
                               % self.wvfs.reljoin(reporelpath(self), subpath))
        if len(matchers) == 1:
            return matchers[0]
        return matchmod.unionmatcher(matchers)
831
831
832 def walk(self, match):
832 def walk(self, match):
833 ctx = self._repo[None]
833 ctx = self._repo[None]
834 return ctx.walk(match)
834 return ctx.walk(match)
835
835
836 @annotatesubrepoerror
836 @annotatesubrepoerror
837 def forget(self, match, prefix, uipathfn, dryrun, interactive):
837 def forget(self, match, prefix, uipathfn, dryrun, interactive):
838 return cmdutil.forget(self.ui, self._repo, match, prefix, uipathfn,
838 return cmdutil.forget(self.ui, self._repo, match, prefix, uipathfn,
839 True, dryrun=dryrun, interactive=interactive)
839 True, dryrun=dryrun, interactive=interactive)
840
840
    @annotatesubrepoerror
    def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos,
                    dryrun, warnings):
        # Delegate 'remove' to cmdutil against the backing repository.
        # NOTE(review): 'warnings' is accepted for interface compatibility
        # but is not forwarded to cmdutil.remove — confirm this is intended.
        return cmdutil.remove(self.ui, self._repo, matcher, prefix, uipathfn,
                              after, force, subrepos, dryrun)
846
846
    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        '''revert the subrepo to the revision recorded in substate'''
        # reverting a subrepo is a 2 step process:
        # 1. if the no_backup is not set, revert all modified
        #    files inside the subrepo
        # 2. update the subrepo to the revision specified in
        #    the corresponding substate dictionary
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get(r'no_backup'):
            # Revert all files on the subrepo, creating backups
            # Note that this will not recursively revert subrepos
            # We could do it if there was a set:subrepos() predicate
            opts = opts.copy()
            # pin the revert target to the recorded substate revision
            opts[r'date'] = None
            opts[r'rev'] = substate[1]

            self.filerevert(*pats, **opts)

        # Update the repo to the revision specified in the given substate
        if not opts.get(r'dry_run'):
            self.get(substate, overwrite=True)
868
868
869 def filerevert(self, *pats, **opts):
869 def filerevert(self, *pats, **opts):
870 ctx = self._repo[opts[r'rev']]
870 ctx = self._repo[opts[r'rev']]
871 parents = self._repo.dirstate.parents()
871 parents = self._repo.dirstate.parents()
872 if opts.get(r'all'):
872 if opts.get(r'all'):
873 pats = ['set:modified()']
873 pats = ['set:modified()']
874 else:
874 else:
875 pats = []
875 pats = []
876 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
876 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
877
877
878 def shortid(self, revid):
878 def shortid(self, revid):
879 return revid[:12]
879 return revid[:12]
880
880
    @annotatesubrepoerror
    def unshare(self):
        '''convert a shared subrepo into a standalone repository'''
        # subrepo inherently violates our import layering rules
        # because it wants to make repo objects from deep inside the stack
        # so we manually delay the circular imports to not break
        # scripts that don't use our demand-loading
        global hg
        from . import hg as h
        hg = h

        # Nothing prevents a user from sharing in a repo, and then making that a
        # subrepo. Alternately, the previous unshare attempt may have failed
        # part way through. So recurse whether or not this layer is shared.
        if self._repo.shared():
            self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)

        hg.unshare(self.ui, self._repo)
898
898
    def verify(self):
        '''check that the recorded subrepo revision is usable; returns 0

        Hidden or missing revisions only produce warnings — a missing one
        may simply need to be pulled.'''
        try:
            rev = self._state[1]
            ctx = self._repo.unfiltered()[rev]
            if ctx.hidden():
                # Since hidden revisions aren't pushed/pulled, it seems worth an
                # explicit warning.
                ui = self._repo.ui
                ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
                        (self._relpath, node.short(self._ctx.node())))
            return 0
        except error.RepoLookupError:
            # A missing subrepo revision may be a case of needing to pull it, so
            # don't treat this as an error.
            self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
                               (self._relpath, node.short(self._ctx.node())))
            return 0
916
916
917 @propertycache
917 @propertycache
918 def wvfs(self):
918 def wvfs(self):
919 """return own wvfs for efficiency and consistency
919 """return own wvfs for efficiency and consistency
920 """
920 """
921 return self._repo.wvfs
921 return self._repo.wvfs
922
922
923 @propertycache
923 @propertycache
924 def _relpath(self):
924 def _relpath(self):
925 """return path to this subrepository as seen from outermost repository
925 """return path to this subrepository as seen from outermost repository
926 """
926 """
927 # Keep consistent dir separators by avoiding vfs.join(self._path)
927 # Keep consistent dir separators by avoiding vfs.join(self._path)
928 return reporelpath(self._repo)
928 return reporelpath(self._repo)
929
929
class svnsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        # Subversion-backed subrepo; aborts when no 'svn' executable is on
        # PATH.  'allowcreate' is accepted for interface compatibility but
        # not used here.
        super(svnsubrepo, self).__init__(ctx, path)
        self._state = state
        self._exe = procutil.findexe('svn')
        if not self._exe:
            raise error.Abort(_("'svn' executable not found for subrepo '%s'")
                              % self._path)
938
938
    def _svncommand(self, commands, filename='', failok=False):
        '''run an svn subcommand against this subrepo checkout

        Returns (stdout, stderr) as text.  Unless failok is set, a non-zero
        exit status aborts, and any stderr output is shown as a warning.'''
        cmd = [self._exe]
        extrakw = {}
        if not self.ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw[r'stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        # filename='' (the default) still appends the subrepo root path;
        # pass filename=None to omit the path argument entirely
        if filename is not None:
            path = self.wvfs.reljoin(self._ctx.repo().origroot,
                                     self._path, filename)
            cmd.append(path)
        env = dict(encoding.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
                             bufsize=-1, close_fds=procutil.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             env=procutil.tonativeenv(env), **extrakw)
        stdout, stderr = map(util.fromnativeeol, p.communicate())
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise error.Abort(stderr or 'exited with code %d'
                                  % p.returncode)
            if stderr:
                self.ui.warn(stderr + '\n')
        return stdout, stderr
976 return stdout, stderr
977
977
978 @propertycache
978 @propertycache
979 def _svnversion(self):
979 def _svnversion(self):
980 output, err = self._svncommand(['--version', '--quiet'], filename=None)
980 output, err = self._svncommand(['--version', '--quiet'], filename=None)
981 m = re.search(br'^(\d+)\.(\d+)', output)
981 m = re.search(br'^(\d+)\.(\d+)', output)
982 if not m:
982 if not m:
983 raise error.Abort(_('cannot retrieve svn tool version'))
983 raise error.Abort(_('cannot retrieve svn tool version'))
984 return (int(m.group(1)), int(m.group(2)))
984 return (int(m.group(1)), int(m.group(2)))
985
985
986 def _svnmissing(self):
986 def _svnmissing(self):
987 return not self.wvfs.exists('.svn')
987 return not self.wvfs.exists('.svn')
988
988
989 def _wcrevs(self):
989 def _wcrevs(self):
990 # Get the working directory revision as well as the last
990 # Get the working directory revision as well as the last
991 # commit revision so we can compare the subrepo state with
991 # commit revision so we can compare the subrepo state with
992 # both. We used to store the working directory one.
992 # both. We used to store the working directory one.
993 output, err = self._svncommand(['info', '--xml'])
993 output, err = self._svncommand(['info', '--xml'])
994 doc = xml.dom.minidom.parseString(output)
994 doc = xml.dom.minidom.parseString(output)
995 entries = doc.getElementsByTagName(r'entry')
995 entries = doc.getElementsByTagName(r'entry')
996 lastrev, rev = '0', '0'
996 lastrev, rev = '0', '0'
997 if entries:
997 if entries:
998 rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
998 rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
999 commits = entries[0].getElementsByTagName(r'commit')
999 commits = entries[0].getElementsByTagName(r'commit')
1000 if commits:
1000 if commits:
1001 lastrev = pycompat.bytestr(
1001 lastrev = pycompat.bytestr(
1002 commits[0].getAttribute(r'revision')) or '0'
1002 commits[0].getAttribute(r'revision')) or '0'
1003 return (lastrev, rev)
1003 return (lastrev, rev)
1004
1004
1005 def _wcrev(self):
1005 def _wcrev(self):
1006 return self._wcrevs()[0]
1006 return self._wcrevs()[0]
1007
1007
1008 def _wcchanged(self):
1008 def _wcchanged(self):
1009 """Return (changes, extchanges, missing) where changes is True
1009 """Return (changes, extchanges, missing) where changes is True
1010 if the working directory was changed, extchanges is
1010 if the working directory was changed, extchanges is
1011 True if any of these changes concern an external entry and missing
1011 True if any of these changes concern an external entry and missing
1012 is True if any change is a missing entry.
1012 is True if any change is a missing entry.
1013 """
1013 """
1014 output, err = self._svncommand(['status', '--xml'])
1014 output, err = self._svncommand(['status', '--xml'])
1015 externals, changes, missing = [], [], []
1015 externals, changes, missing = [], [], []
1016 doc = xml.dom.minidom.parseString(output)
1016 doc = xml.dom.minidom.parseString(output)
1017 for e in doc.getElementsByTagName(r'entry'):
1017 for e in doc.getElementsByTagName(r'entry'):
1018 s = e.getElementsByTagName(r'wc-status')
1018 s = e.getElementsByTagName(r'wc-status')
1019 if not s:
1019 if not s:
1020 continue
1020 continue
1021 item = s[0].getAttribute(r'item')
1021 item = s[0].getAttribute(r'item')
1022 props = s[0].getAttribute(r'props')
1022 props = s[0].getAttribute(r'props')
1023 path = e.getAttribute(r'path').encode('utf8')
1023 path = e.getAttribute(r'path').encode('utf8')
1024 if item == r'external':
1024 if item == r'external':
1025 externals.append(path)
1025 externals.append(path)
1026 elif item == r'missing':
1026 elif item == r'missing':
1027 missing.append(path)
1027 missing.append(path)
1028 if (item not in (r'', r'normal', r'unversioned', r'external')
1028 if (item not in (r'', r'normal', r'unversioned', r'external')
1029 or props not in (r'', r'none', r'normal')):
1029 or props not in (r'', r'none', r'normal')):
1030 changes.append(path)
1030 changes.append(path)
1031 for path in changes:
1031 for path in changes:
1032 for ext in externals:
1032 for ext in externals:
1033 if path == ext or path.startswith(ext + pycompat.ossep):
1033 if path == ext or path.startswith(ext + pycompat.ossep):
1034 return True, True, bool(missing)
1034 return True, True, bool(missing)
1035 return bool(changes), False, bool(missing)
1035 return bool(changes), False, bool(missing)
1036
1036
1037 @annotatesubrepoerror
1037 @annotatesubrepoerror
1038 def dirty(self, ignoreupdate=False, missing=False):
1038 def dirty(self, ignoreupdate=False, missing=False):
1039 if self._svnmissing():
1039 if self._svnmissing():
1040 return self._state[1] != ''
1040 return self._state[1] != ''
1041 wcchanged = self._wcchanged()
1041 wcchanged = self._wcchanged()
1042 changed = wcchanged[0] or (missing and wcchanged[2])
1042 changed = wcchanged[0] or (missing and wcchanged[2])
1043 if not changed:
1043 if not changed:
1044 if self._state[1] in self._wcrevs() or ignoreupdate:
1044 if self._state[1] in self._wcrevs() or ignoreupdate:
1045 return False
1045 return False
1046 return True
1046 return True
1047
1047
1048 def basestate(self):
1048 def basestate(self):
1049 lastrev, rev = self._wcrevs()
1049 lastrev, rev = self._wcrevs()
1050 if lastrev != rev:
1050 if lastrev != rev:
1051 # Last committed rev is not the same than rev. We would
1051 # Last committed rev is not the same than rev. We would
1052 # like to take lastrev but we do not know if the subrepo
1052 # like to take lastrev but we do not know if the subrepo
1053 # URL exists at lastrev. Test it and fallback to rev it
1053 # URL exists at lastrev. Test it and fallback to rev it
1054 # is not there.
1054 # is not there.
1055 try:
1055 try:
1056 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1056 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1057 return lastrev
1057 return lastrev
1058 except error.Abort:
1058 except error.Abort:
1059 pass
1059 pass
1060 return rev
1060 return rev
1061
1061
1062 @annotatesubrepoerror
1062 @annotatesubrepoerror
1063 def commit(self, text, user, date):
1063 def commit(self, text, user, date):
1064 # user and date are out of our hands since svn is centralized
1064 # user and date are out of our hands since svn is centralized
1065 changed, extchanged, missing = self._wcchanged()
1065 changed, extchanged, missing = self._wcchanged()
1066 if not changed:
1066 if not changed:
1067 return self.basestate()
1067 return self.basestate()
1068 if extchanged:
1068 if extchanged:
1069 # Do not try to commit externals
1069 # Do not try to commit externals
1070 raise error.Abort(_('cannot commit svn externals'))
1070 raise error.Abort(_('cannot commit svn externals'))
1071 if missing:
1071 if missing:
1072 # svn can commit with missing entries but aborting like hg
1072 # svn can commit with missing entries but aborting like hg
1073 # seems a better approach.
1073 # seems a better approach.
1074 raise error.Abort(_('cannot commit missing svn entries'))
1074 raise error.Abort(_('cannot commit missing svn entries'))
1075 commitinfo, err = self._svncommand(['commit', '-m', text])
1075 commitinfo, err = self._svncommand(['commit', '-m', text])
1076 self.ui.status(commitinfo)
1076 self.ui.status(commitinfo)
1077 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1077 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1078 if not newrev:
1078 if not newrev:
1079 if not commitinfo.strip():
1079 if not commitinfo.strip():
1080 # Sometimes, our definition of "changed" differs from
1080 # Sometimes, our definition of "changed" differs from
1081 # svn one. For instance, svn ignores missing files
1081 # svn one. For instance, svn ignores missing files
1082 # when committing. If there are only missing files, no
1082 # when committing. If there are only missing files, no
1083 # commit is made, no output and no error code.
1083 # commit is made, no output and no error code.
1084 raise error.Abort(_('failed to commit svn changes'))
1084 raise error.Abort(_('failed to commit svn changes'))
1085 raise error.Abort(commitinfo.splitlines()[-1])
1085 raise error.Abort(commitinfo.splitlines()[-1])
1086 newrev = newrev.groups()[0]
1086 newrev = newrev.groups()[0]
1087 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1087 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1088 return newrev
1088 return newrev
1089
1089
1090 @annotatesubrepoerror
1090 @annotatesubrepoerror
1091 def remove(self):
1091 def remove(self):
1092 if self.dirty():
1092 if self.dirty():
1093 self.ui.warn(_('not removing repo %s because '
1093 self.ui.warn(_('not removing repo %s because '
1094 'it has changes.\n') % self._path)
1094 'it has changes.\n') % self._path)
1095 return
1095 return
1096 self.ui.note(_('removing subrepo %s\n') % self._path)
1096 self.ui.note(_('removing subrepo %s\n') % self._path)
1097
1097
1098 self.wvfs.rmtree(forcibly=True)
1098 self.wvfs.rmtree(forcibly=True)
1099 try:
1099 try:
1100 pwvfs = self._ctx.repo().wvfs
1100 pwvfs = self._ctx.repo().wvfs
1101 pwvfs.removedirs(pwvfs.dirname(self._path))
1101 pwvfs.removedirs(pwvfs.dirname(self._path))
1102 except OSError:
1102 except OSError:
1103 pass
1103 pass
1104
1104
1105 @annotatesubrepoerror
1105 @annotatesubrepoerror
1106 def get(self, state, overwrite=False):
1106 def get(self, state, overwrite=False):
1107 if overwrite:
1107 if overwrite:
1108 self._svncommand(['revert', '--recursive'])
1108 self._svncommand(['revert', '--recursive'])
1109 args = ['checkout']
1109 args = ['checkout']
1110 if self._svnversion >= (1, 5):
1110 if self._svnversion >= (1, 5):
1111 args.append('--force')
1111 args.append('--force')
1112 # The revision must be specified at the end of the URL to properly
1112 # The revision must be specified at the end of the URL to properly
1113 # update to a directory which has since been deleted and recreated.
1113 # update to a directory which has since been deleted and recreated.
1114 args.append('%s@%s' % (state[0], state[1]))
1114 args.append('%s@%s' % (state[0], state[1]))
1115
1115
1116 # SEC: check that the ssh url is safe
1116 # SEC: check that the ssh url is safe
1117 util.checksafessh(state[0])
1117 util.checksafessh(state[0])
1118
1118
1119 status, err = self._svncommand(args, failok=True)
1119 status, err = self._svncommand(args, failok=True)
1120 _sanitize(self.ui, self.wvfs, '.svn')
1120 _sanitize(self.ui, self.wvfs, '.svn')
1121 if not re.search('Checked out revision [0-9]+.', status):
1121 if not re.search('Checked out revision [0-9]+.', status):
1122 if ('is already a working copy for a different URL' in err
1122 if ('is already a working copy for a different URL' in err
1123 and (self._wcchanged()[:2] == (False, False))):
1123 and (self._wcchanged()[:2] == (False, False))):
1124 # obstructed but clean working copy, so just blow it away.
1124 # obstructed but clean working copy, so just blow it away.
1125 self.remove()
1125 self.remove()
1126 self.get(state, overwrite=False)
1126 self.get(state, overwrite=False)
1127 return
1127 return
1128 raise error.Abort((status or err).splitlines()[-1])
1128 raise error.Abort((status or err).splitlines()[-1])
1129 self.ui.status(status)
1129 self.ui.status(status)
1130
1130
1131 @annotatesubrepoerror
1131 @annotatesubrepoerror
1132 def merge(self, state):
1132 def merge(self, state):
1133 old = self._state[1]
1133 old = self._state[1]
1134 new = state[1]
1134 new = state[1]
1135 wcrev = self._wcrev()
1135 wcrev = self._wcrev()
1136 if new != wcrev:
1136 if new != wcrev:
1137 dirty = old == wcrev or self._wcchanged()[0]
1137 dirty = old == wcrev or self._wcchanged()[0]
1138 if _updateprompt(self.ui, self, dirty, wcrev, new):
1138 if _updateprompt(self.ui, self, dirty, wcrev, new):
1139 self.get(state, False)
1139 self.get(state, False)
1140
1140
1141 def push(self, opts):
1141 def push(self, opts):
1142 # push is a no-op for SVN
1142 # push is a no-op for SVN
1143 return True
1143 return True
1144
1144
1145 @annotatesubrepoerror
1145 @annotatesubrepoerror
1146 def files(self):
1146 def files(self):
1147 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1147 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1148 doc = xml.dom.minidom.parseString(output)
1148 doc = xml.dom.minidom.parseString(output)
1149 paths = []
1149 paths = []
1150 for e in doc.getElementsByTagName(r'entry'):
1150 for e in doc.getElementsByTagName(r'entry'):
1151 kind = pycompat.bytestr(e.getAttribute(r'kind'))
1151 kind = pycompat.bytestr(e.getAttribute(r'kind'))
1152 if kind != 'file':
1152 if kind != 'file':
1153 continue
1153 continue
1154 name = r''.join(c.data for c
1154 name = r''.join(c.data for c
1155 in e.getElementsByTagName(r'name')[0].childNodes
1155 in e.getElementsByTagName(r'name')[0].childNodes
1156 if c.nodeType == c.TEXT_NODE)
1156 if c.nodeType == c.TEXT_NODE)
1157 paths.append(name.encode('utf8'))
1157 paths.append(name.encode('utf8'))
1158 return paths
1158 return paths
1159
1159
1160 def filedata(self, name, decode):
1160 def filedata(self, name, decode):
1161 return self._svncommand(['cat'], name)[0]
1161 return self._svncommand(['cat'], name)[0]
1162
1162
1163
1163
1164 class gitsubrepo(abstractsubrepo):
1164 class gitsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        """Set up a git subrepo at ``path``.

        ``state`` is the (source, revision, kind) tuple from .hgsubstate.
        Aborts (via ``_ensuregit``) when no usable git executable exists.
        """
        super(gitsubrepo, self).__init__(ctx, path)
        self._state = state
        # absolute path of the checkout inside the parent working directory
        self._abspath = ctx.repo().wjoin(path)
        self._subparent = ctx.repo()
        self._ensuregit()
1171
1171
    def _ensuregit(self):
        """Find a working git executable or abort.

        Tries ``git`` first and, on Windows only, falls back to
        ``git.cmd``.  Then classifies the reported version via
        ``_checkversion``: 'abort' raises, 'warning'/'unknown' only warn.
        """
        try:
            self._gitexecutable = 'git'
            out, err = self._gitnodir(['--version'])
        except OSError as e:
            genericerror = _("error executing git for subrepo '%s': %s")
            notfoundhint = _("check git is installed and in your PATH")
            if e.errno != errno.ENOENT:
                # git exists but failed to run: report the OS error as-is
                raise error.Abort(genericerror % (
                    self._path, encoding.strtolocal(e.strerror)))
            elif pycompat.iswindows:
                try:
                    self._gitexecutable = 'git.cmd'
                    out, err = self._gitnodir(['--version'])
                except OSError as e2:
                    if e2.errno == errno.ENOENT:
                        raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
                                            " for subrepo '%s'") % self._path,
                                          hint=notfoundhint)
                    else:
                        raise error.Abort(genericerror % (self._path,
                                          encoding.strtolocal(e2.strerror)))
            else:
                raise error.Abort(_("couldn't find git for subrepo '%s'")
                                  % self._path, hint=notfoundhint)
        versionstatus = self._checkversion(out)
        if versionstatus == 'unknown':
            self.ui.warn(_('cannot retrieve git version\n'))
        elif versionstatus == 'abort':
            raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
        elif versionstatus == 'warning':
            self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1204
1204
1205 @staticmethod
1205 @staticmethod
1206 def _gitversion(out):
1206 def _gitversion(out):
1207 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1207 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1208 if m:
1208 if m:
1209 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1209 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1210
1210
1211 m = re.search(br'^git version (\d+)\.(\d+)', out)
1211 m = re.search(br'^git version (\d+)\.(\d+)', out)
1212 if m:
1212 if m:
1213 return (int(m.group(1)), int(m.group(2)), 0)
1213 return (int(m.group(1)), int(m.group(2)), 0)
1214
1214
1215 return -1
1215 return -1
1216
1216
1217 @staticmethod
1217 @staticmethod
1218 def _checkversion(out):
1218 def _checkversion(out):
1219 '''ensure git version is new enough
1219 '''ensure git version is new enough
1220
1220
1221 >>> _checkversion = gitsubrepo._checkversion
1221 >>> _checkversion = gitsubrepo._checkversion
1222 >>> _checkversion(b'git version 1.6.0')
1222 >>> _checkversion(b'git version 1.6.0')
1223 'ok'
1223 'ok'
1224 >>> _checkversion(b'git version 1.8.5')
1224 >>> _checkversion(b'git version 1.8.5')
1225 'ok'
1225 'ok'
1226 >>> _checkversion(b'git version 1.4.0')
1226 >>> _checkversion(b'git version 1.4.0')
1227 'abort'
1227 'abort'
1228 >>> _checkversion(b'git version 1.5.0')
1228 >>> _checkversion(b'git version 1.5.0')
1229 'warning'
1229 'warning'
1230 >>> _checkversion(b'git version 1.9-rc0')
1230 >>> _checkversion(b'git version 1.9-rc0')
1231 'ok'
1231 'ok'
1232 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1232 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1233 'ok'
1233 'ok'
1234 >>> _checkversion(b'git version 1.9.0.GIT')
1234 >>> _checkversion(b'git version 1.9.0.GIT')
1235 'ok'
1235 'ok'
1236 >>> _checkversion(b'git version 12345')
1236 >>> _checkversion(b'git version 12345')
1237 'unknown'
1237 'unknown'
1238 >>> _checkversion(b'no')
1238 >>> _checkversion(b'no')
1239 'unknown'
1239 'unknown'
1240 '''
1240 '''
1241 version = gitsubrepo._gitversion(out)
1241 version = gitsubrepo._gitversion(out)
1242 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1242 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1243 # despite the docstring comment. For now, error on 1.4.0, warn on
1243 # despite the docstring comment. For now, error on 1.4.0, warn on
1244 # 1.5.0 but attempt to continue.
1244 # 1.5.0 but attempt to continue.
1245 if version == -1:
1245 if version == -1:
1246 return 'unknown'
1246 return 'unknown'
1247 if version < (1, 5, 0):
1247 if version < (1, 5, 0):
1248 return 'abort'
1248 return 'abort'
1249 elif version < (1, 6, 0):
1249 elif version < (1, 6, 0):
1250 return 'warning'
1250 return 'warning'
1251 return 'ok'
1251 return 'ok'
1252
1252
1253 def _gitcommand(self, commands, env=None, stream=False):
1253 def _gitcommand(self, commands, env=None, stream=False):
1254 return self._gitdir(commands, env=env, stream=stream)[0]
1254 return self._gitdir(commands, env=env, stream=stream)[0]
1255
1255
1256 def _gitdir(self, commands, env=None, stream=False):
1256 def _gitdir(self, commands, env=None, stream=False):
1257 return self._gitnodir(commands, env=env, stream=stream,
1257 return self._gitnodir(commands, env=env, stream=stream,
1258 cwd=self._abspath)
1258 cwd=self._abspath)
1259
1259
    def _gitnodir(self, commands, env=None, stream=False, cwd=None):
        """Calls the git command

        The methods tries to call the git command. versions prior to 1.6.0
        are not supported and very probably fail.

        Returns (stdout, returncode) after the process exits; with
        ``stream`` set, returns (stdout pipe, None) without waiting.
        Exit codes 0 and 1 are accepted for every command; other codes
        are tolerated only for cat-file/symbolic-ref, and abort otherwise.
        """
        self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
        if env is None:
            env = encoding.environ.copy()
        # disable localization for Git output (issue5176)
        env['LC_ALL'] = 'C'
        # fix for Git CVE-2015-7545
        if 'GIT_ALLOW_PROTOCOL' not in env:
            env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
        # unless ui.quiet is set, print git's stderr,
        # which is mostly progress and useful info
        errpipe = None
        if self.ui.quiet:
            errpipe = open(os.devnull, 'w')
        if self.ui._colormode and len(commands) and commands[0] == "diff":
            # insert the argument in the front,
            # the end of git diff arguments is used for paths
            commands.insert(1, '--color')
        p = subprocess.Popen(pycompat.rapply(procutil.tonativestr,
                                             [self._gitexecutable] + commands),
                             bufsize=-1,
                             cwd=pycompat.rapply(procutil.tonativestr, cwd),
                             env=procutil.tonativeenv(env),
                             close_fds=procutil.closefds,
                             stdout=subprocess.PIPE, stderr=errpipe)
        if stream:
            return p.stdout, None

        retdata = p.stdout.read().strip()
        # wait for the child to exit to avoid race condition.
        p.wait()

        if p.returncode != 0 and p.returncode != 1:
            # there are certain error codes that are ok
            command = commands[0]
            if command in ('cat-file', 'symbolic-ref'):
                return retdata, p.returncode
            # for all others, abort
            raise error.Abort(_('git %s error %d in %s') %
                              (command, p.returncode, self._relpath))

        return retdata, p.returncode
1307
1307
1308 def _gitmissing(self):
1308 def _gitmissing(self):
1309 return not self.wvfs.exists('.git')
1309 return not self.wvfs.exists('.git')
1310
1310
1311 def _gitstate(self):
1311 def _gitstate(self):
1312 return self._gitcommand(['rev-parse', 'HEAD'])
1312 return self._gitcommand(['rev-parse', 'HEAD'])
1313
1313
1314 def _gitcurrentbranch(self):
1314 def _gitcurrentbranch(self):
1315 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1315 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1316 if err:
1316 if err:
1317 current = None
1317 current = None
1318 return current
1318 return current
1319
1319
1320 def _gitremote(self, remote):
1320 def _gitremote(self, remote):
1321 out = self._gitcommand(['remote', 'show', '-n', remote])
1321 out = self._gitcommand(['remote', 'show', '-n', remote])
1322 line = out.split('\n')[1]
1322 line = out.split('\n')[1]
1323 i = line.index('URL: ') + len('URL: ')
1323 i = line.index('URL: ') + len('URL: ')
1324 return line[i:]
1324 return line[i:]
1325
1325
1326 def _githavelocally(self, revision):
1326 def _githavelocally(self, revision):
1327 out, code = self._gitdir(['cat-file', '-e', revision])
1327 out, code = self._gitdir(['cat-file', '-e', revision])
1328 return code == 0
1328 return code == 0
1329
1329
1330 def _gitisancestor(self, r1, r2):
1330 def _gitisancestor(self, r1, r2):
1331 base = self._gitcommand(['merge-base', r1, r2])
1331 base = self._gitcommand(['merge-base', r1, r2])
1332 return base == r1
1332 return base == r1
1333
1333
1334 def _gitisbare(self):
1334 def _gitisbare(self):
1335 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1335 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1336
1336
    def _gitupdatestat(self):
        """Refresh git's stat cache; must run before ``git diff-index``.

        diff-index only looks at changes to file stat; ``update-index -q
        --refresh`` looks at file contents and updates the stat info.
        """
        self._gitcommand(['update-index', '-q', '--refresh'])
1342
1342
1343 def _gitbranchmap(self):
1343 def _gitbranchmap(self):
1344 '''returns 2 things:
1344 '''returns 2 things:
1345 a map from git branch to revision
1345 a map from git branch to revision
1346 a map from revision to branches'''
1346 a map from revision to branches'''
1347 branch2rev = {}
1347 branch2rev = {}
1348 rev2branch = {}
1348 rev2branch = {}
1349
1349
1350 out = self._gitcommand(['for-each-ref', '--format',
1350 out = self._gitcommand(['for-each-ref', '--format',
1351 '%(objectname) %(refname)'])
1351 '%(objectname) %(refname)'])
1352 for line in out.split('\n'):
1352 for line in out.split('\n'):
1353 revision, ref = line.split(' ')
1353 revision, ref = line.split(' ')
1354 if (not ref.startswith('refs/heads/') and
1354 if (not ref.startswith('refs/heads/') and
1355 not ref.startswith('refs/remotes/')):
1355 not ref.startswith('refs/remotes/')):
1356 continue
1356 continue
1357 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1357 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1358 continue # ignore remote/HEAD redirects
1358 continue # ignore remote/HEAD redirects
1359 branch2rev[ref] = revision
1359 branch2rev[ref] = revision
1360 rev2branch.setdefault(revision, []).append(ref)
1360 rev2branch.setdefault(revision, []).append(ref)
1361 return branch2rev, rev2branch
1361 return branch2rev, rev2branch
1362
1362
1363 def _gittracking(self, branches):
1363 def _gittracking(self, branches):
1364 'return map of remote branch to local tracking branch'
1364 'return map of remote branch to local tracking branch'
1365 # assumes no more than one local tracking branch for each remote
1365 # assumes no more than one local tracking branch for each remote
1366 tracking = {}
1366 tracking = {}
1367 for b in branches:
1367 for b in branches:
1368 if b.startswith('refs/remotes/'):
1368 if b.startswith('refs/remotes/'):
1369 continue
1369 continue
1370 bname = b.split('/', 2)[2]
1370 bname = b.split('/', 2)[2]
1371 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1371 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1372 if remote:
1372 if remote:
1373 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1373 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1374 tracking['refs/remotes/%s/%s' %
1374 tracking['refs/remotes/%s/%s' %
1375 (remote, ref.split('/', 2)[2])] = b
1375 (remote, ref.split('/', 2)[2])] = b
1376 return tracking
1376 return tracking
1377
1377
1378 def _abssource(self, source):
1378 def _abssource(self, source):
1379 if '://' not in source:
1379 if '://' not in source:
1380 # recognize the scp syntax as an absolute source
1380 # recognize the scp syntax as an absolute source
1381 colon = source.find(':')
1381 colon = source.find(':')
1382 if colon != -1 and '/' not in source[:colon]:
1382 if colon != -1 and '/' not in source[:colon]:
1383 return source
1383 return source
1384 self._subsource = source
1384 self._subsource = source
1385 return _abssource(self)
1385 return _abssource(self)
1386
1386
    def _fetch(self, source, revision):
        """Make ``revision`` available locally, cloning or fetching as needed.

        Clones from ``source`` when no checkout exists yet; otherwise
        fetches from origin.  Aborts if the revision still cannot be
        found afterwards.
        """
        if self._gitmissing():
            # SEC: check for safe ssh url
            util.checksafessh(source)

            source = self._abssource(source)
            self.ui.status(_('cloning subrepo %s from %s\n') %
                           (self._relpath, source))
            self._gitnodir(['clone', source, self._abspath])
        if self._githavelocally(revision):
            return
        self.ui.status(_('pulling subrepo %s from %s\n') %
                       (self._relpath, self._gitremote('origin')))
        # try only origin: the originally cloned repo
        self._gitcommand(['fetch'])
        if not self._githavelocally(revision):
            raise error.Abort(_('revision %s does not exist in subrepository '
                                '"%s"\n') % (revision, self._relpath))
1405
1405
1406 @annotatesubrepoerror
1406 @annotatesubrepoerror
1407 def dirty(self, ignoreupdate=False, missing=False):
1407 def dirty(self, ignoreupdate=False, missing=False):
1408 if self._gitmissing():
1408 if self._gitmissing():
1409 return self._state[1] != ''
1409 return self._state[1] != ''
1410 if self._gitisbare():
1410 if self._gitisbare():
1411 return True
1411 return True
1412 if not ignoreupdate and self._state[1] != self._gitstate():
1412 if not ignoreupdate and self._state[1] != self._gitstate():
1413 # different version checked out
1413 # different version checked out
1414 return True
1414 return True
1415 # check for staged changes or modified files; ignore untracked files
1415 # check for staged changes or modified files; ignore untracked files
1416 self._gitupdatestat()
1416 self._gitupdatestat()
1417 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1417 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1418 return code == 1
1418 return code == 1
1419
1419
1420 def basestate(self):
1420 def basestate(self):
1421 return self._gitstate()
1421 return self._gitstate()
1422
1422
1423 @annotatesubrepoerror
1423 @annotatesubrepoerror
1424 def get(self, state, overwrite=False):
1424 def get(self, state, overwrite=False):
1425 source, revision, kind = state
1425 source, revision, kind = state
1426 if not revision:
1426 if not revision:
1427 self.remove()
1427 self.remove()
1428 return
1428 return
1429 self._fetch(source, revision)
1429 self._fetch(source, revision)
1430 # if the repo was set to be bare, unbare it
1430 # if the repo was set to be bare, unbare it
1431 if self._gitisbare():
1431 if self._gitisbare():
1432 self._gitcommand(['config', 'core.bare', 'false'])
1432 self._gitcommand(['config', 'core.bare', 'false'])
1433 if self._gitstate() == revision:
1433 if self._gitstate() == revision:
1434 self._gitcommand(['reset', '--hard', 'HEAD'])
1434 self._gitcommand(['reset', '--hard', 'HEAD'])
1435 return
1435 return
1436 elif self._gitstate() == revision:
1436 elif self._gitstate() == revision:
1437 if overwrite:
1437 if overwrite:
1438 # first reset the index to unmark new files for commit, because
1438 # first reset the index to unmark new files for commit, because
1439 # reset --hard will otherwise throw away files added for commit,
1439 # reset --hard will otherwise throw away files added for commit,
1440 # not just unmark them.
1440 # not just unmark them.
1441 self._gitcommand(['reset', 'HEAD'])
1441 self._gitcommand(['reset', 'HEAD'])
1442 self._gitcommand(['reset', '--hard', 'HEAD'])
1442 self._gitcommand(['reset', '--hard', 'HEAD'])
1443 return
1443 return
1444 branch2rev, rev2branch = self._gitbranchmap()
1444 branch2rev, rev2branch = self._gitbranchmap()
1445
1445
1446 def checkout(args):
1446 def checkout(args):
1447 cmd = ['checkout']
1447 cmd = ['checkout']
1448 if overwrite:
1448 if overwrite:
1449 # first reset the index to unmark new files for commit, because
1449 # first reset the index to unmark new files for commit, because
1450 # the -f option will otherwise throw away files added for
1450 # the -f option will otherwise throw away files added for
1451 # commit, not just unmark them.
1451 # commit, not just unmark them.
1452 self._gitcommand(['reset', 'HEAD'])
1452 self._gitcommand(['reset', 'HEAD'])
1453 cmd.append('-f')
1453 cmd.append('-f')
1454 self._gitcommand(cmd + args)
1454 self._gitcommand(cmd + args)
1455 _sanitize(self.ui, self.wvfs, '.git')
1455 _sanitize(self.ui, self.wvfs, '.git')
1456
1456
1457 def rawcheckout():
1457 def rawcheckout():
1458 # no branch to checkout, check it out with no branch
1458 # no branch to checkout, check it out with no branch
1459 self.ui.warn(_('checking out detached HEAD in '
1459 self.ui.warn(_('checking out detached HEAD in '
1460 'subrepository "%s"\n') % self._relpath)
1460 'subrepository "%s"\n') % self._relpath)
1461 self.ui.warn(_('check out a git branch if you intend '
1461 self.ui.warn(_('check out a git branch if you intend '
1462 'to make changes\n'))
1462 'to make changes\n'))
1463 checkout(['-q', revision])
1463 checkout(['-q', revision])
1464
1464
1465 if revision not in rev2branch:
1465 if revision not in rev2branch:
1466 rawcheckout()
1466 rawcheckout()
1467 return
1467 return
1468 branches = rev2branch[revision]
1468 branches = rev2branch[revision]
1469 firstlocalbranch = None
1469 firstlocalbranch = None
1470 for b in branches:
1470 for b in branches:
1471 if b == 'refs/heads/master':
1471 if b == 'refs/heads/master':
1472 # master trumps all other branches
1472 # master trumps all other branches
1473 checkout(['refs/heads/master'])
1473 checkout(['refs/heads/master'])
1474 return
1474 return
1475 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1475 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1476 firstlocalbranch = b
1476 firstlocalbranch = b
1477 if firstlocalbranch:
1477 if firstlocalbranch:
1478 checkout([firstlocalbranch])
1478 checkout([firstlocalbranch])
1479 return
1479 return
1480
1480
1481 tracking = self._gittracking(branch2rev.keys())
1481 tracking = self._gittracking(branch2rev.keys())
1482 # choose a remote branch already tracked if possible
1482 # choose a remote branch already tracked if possible
1483 remote = branches[0]
1483 remote = branches[0]
1484 if remote not in tracking:
1484 if remote not in tracking:
1485 for b in branches:
1485 for b in branches:
1486 if b in tracking:
1486 if b in tracking:
1487 remote = b
1487 remote = b
1488 break
1488 break
1489
1489
1490 if remote not in tracking:
1490 if remote not in tracking:
1491 # create a new local tracking branch
1491 # create a new local tracking branch
1492 local = remote.split('/', 3)[3]
1492 local = remote.split('/', 3)[3]
1493 checkout(['-b', local, remote])
1493 checkout(['-b', local, remote])
1494 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1494 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1495 # When updating to a tracked remote branch,
1495 # When updating to a tracked remote branch,
1496 # if the local tracking branch is downstream of it,
1496 # if the local tracking branch is downstream of it,
1497 # a normal `git pull` would have performed a "fast-forward merge"
1497 # a normal `git pull` would have performed a "fast-forward merge"
1498 # which is equivalent to updating the local branch to the remote.
1498 # which is equivalent to updating the local branch to the remote.
1499 # Since we are only looking at branching at update, we need to
1499 # Since we are only looking at branching at update, we need to
1500 # detect this situation and perform this action lazily.
1500 # detect this situation and perform this action lazily.
1501 if tracking[remote] != self._gitcurrentbranch():
1501 if tracking[remote] != self._gitcurrentbranch():
1502 checkout([tracking[remote]])
1502 checkout([tracking[remote]])
1503 self._gitcommand(['merge', '--ff', remote])
1503 self._gitcommand(['merge', '--ff', remote])
1504 _sanitize(self.ui, self.wvfs, '.git')
1504 _sanitize(self.ui, self.wvfs, '.git')
1505 else:
1505 else:
1506 # a real merge would be required, just checkout the revision
1506 # a real merge would be required, just checkout the revision
1507 rawcheckout()
1507 rawcheckout()
1508
1508
1509 @annotatesubrepoerror
1509 @annotatesubrepoerror
1510 def commit(self, text, user, date):
1510 def commit(self, text, user, date):
1511 if self._gitmissing():
1511 if self._gitmissing():
1512 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1512 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1513 cmd = ['commit', '-a', '-m', text]
1513 cmd = ['commit', '-a', '-m', text]
1514 env = encoding.environ.copy()
1514 env = encoding.environ.copy()
1515 if user:
1515 if user:
1516 cmd += ['--author', user]
1516 cmd += ['--author', user]
1517 if date:
1517 if date:
1518 # git's date parser silently ignores when seconds < 1e9
1518 # git's date parser silently ignores when seconds < 1e9
1519 # convert to ISO8601
1519 # convert to ISO8601
1520 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1520 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1521 '%Y-%m-%dT%H:%M:%S %1%2')
1521 '%Y-%m-%dT%H:%M:%S %1%2')
1522 self._gitcommand(cmd, env=env)
1522 self._gitcommand(cmd, env=env)
1523 # make sure commit works otherwise HEAD might not exist under certain
1523 # make sure commit works otherwise HEAD might not exist under certain
1524 # circumstances
1524 # circumstances
1525 return self._gitstate()
1525 return self._gitstate()
1526
1526
1527 @annotatesubrepoerror
1527 @annotatesubrepoerror
1528 def merge(self, state):
1528 def merge(self, state):
1529 source, revision, kind = state
1529 source, revision, kind = state
1530 self._fetch(source, revision)
1530 self._fetch(source, revision)
1531 base = self._gitcommand(['merge-base', revision, self._state[1]])
1531 base = self._gitcommand(['merge-base', revision, self._state[1]])
1532 self._gitupdatestat()
1532 self._gitupdatestat()
1533 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1533 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1534
1534
1535 def mergefunc():
1535 def mergefunc():
1536 if base == revision:
1536 if base == revision:
1537 self.get(state) # fast forward merge
1537 self.get(state) # fast forward merge
1538 elif base != self._state[1]:
1538 elif base != self._state[1]:
1539 self._gitcommand(['merge', '--no-commit', revision])
1539 self._gitcommand(['merge', '--no-commit', revision])
1540 _sanitize(self.ui, self.wvfs, '.git')
1540 _sanitize(self.ui, self.wvfs, '.git')
1541
1541
1542 if self.dirty():
1542 if self.dirty():
1543 if self._gitstate() != revision:
1543 if self._gitstate() != revision:
1544 dirty = self._gitstate() == self._state[1] or code != 0
1544 dirty = self._gitstate() == self._state[1] or code != 0
1545 if _updateprompt(self.ui, self, dirty,
1545 if _updateprompt(self.ui, self, dirty,
1546 self._state[1][:7], revision[:7]):
1546 self._state[1][:7], revision[:7]):
1547 mergefunc()
1547 mergefunc()
1548 else:
1548 else:
1549 mergefunc()
1549 mergefunc()
1550
1550
1551 @annotatesubrepoerror
1551 @annotatesubrepoerror
1552 def push(self, opts):
1552 def push(self, opts):
1553 force = opts.get('force')
1553 force = opts.get('force')
1554
1554
1555 if not self._state[1]:
1555 if not self._state[1]:
1556 return True
1556 return True
1557 if self._gitmissing():
1557 if self._gitmissing():
1558 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1558 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1559 # if a branch in origin contains the revision, nothing to do
1559 # if a branch in origin contains the revision, nothing to do
1560 branch2rev, rev2branch = self._gitbranchmap()
1560 branch2rev, rev2branch = self._gitbranchmap()
1561 if self._state[1] in rev2branch:
1561 if self._state[1] in rev2branch:
1562 for b in rev2branch[self._state[1]]:
1562 for b in rev2branch[self._state[1]]:
1563 if b.startswith('refs/remotes/origin/'):
1563 if b.startswith('refs/remotes/origin/'):
1564 return True
1564 return True
1565 for b, revision in branch2rev.iteritems():
1565 for b, revision in branch2rev.iteritems():
1566 if b.startswith('refs/remotes/origin/'):
1566 if b.startswith('refs/remotes/origin/'):
1567 if self._gitisancestor(self._state[1], revision):
1567 if self._gitisancestor(self._state[1], revision):
1568 return True
1568 return True
1569 # otherwise, try to push the currently checked out branch
1569 # otherwise, try to push the currently checked out branch
1570 cmd = ['push']
1570 cmd = ['push']
1571 if force:
1571 if force:
1572 cmd.append('--force')
1572 cmd.append('--force')
1573
1573
1574 current = self._gitcurrentbranch()
1574 current = self._gitcurrentbranch()
1575 if current:
1575 if current:
1576 # determine if the current branch is even useful
1576 # determine if the current branch is even useful
1577 if not self._gitisancestor(self._state[1], current):
1577 if not self._gitisancestor(self._state[1], current):
1578 self.ui.warn(_('unrelated git branch checked out '
1578 self.ui.warn(_('unrelated git branch checked out '
1579 'in subrepository "%s"\n') % self._relpath)
1579 'in subrepository "%s"\n') % self._relpath)
1580 return False
1580 return False
1581 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1581 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1582 (current.split('/', 2)[2], self._relpath))
1582 (current.split('/', 2)[2], self._relpath))
1583 ret = self._gitdir(cmd + ['origin', current])
1583 ret = self._gitdir(cmd + ['origin', current])
1584 return ret[1] == 0
1584 return ret[1] == 0
1585 else:
1585 else:
1586 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1586 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1587 'cannot push revision %s\n') %
1587 'cannot push revision %s\n') %
1588 (self._relpath, self._state[1]))
1588 (self._relpath, self._state[1]))
1589 return False
1589 return False
1590
1590
1591 @annotatesubrepoerror
1591 @annotatesubrepoerror
1592 def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
1592 def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
1593 if self._gitmissing():
1593 if self._gitmissing():
1594 return []
1594 return []
1595
1595
1596 s = self.status(None, unknown=True, clean=True)
1596 s = self.status(None, unknown=True, clean=True)
1597
1597
1598 tracked = set()
1598 tracked = set()
1599 # dirstates 'amn' warn, 'r' is added again
1599 # dirstates 'amn' warn, 'r' is added again
1600 for l in (s.modified, s.added, s.deleted, s.clean):
1600 for l in (s.modified, s.added, s.deleted, s.clean):
1601 tracked.update(l)
1601 tracked.update(l)
1602
1602
1603 # Unknown files not of interest will be rejected by the matcher
1603 # Unknown files not of interest will be rejected by the matcher
1604 files = s.unknown
1604 files = s.unknown
1605 files.extend(match.files())
1605 files.extend(match.files())
1606
1606
1607 rejected = []
1607 rejected = []
1608
1608
1609 files = [f for f in sorted(set(files)) if match(f)]
1609 files = [f for f in sorted(set(files)) if match(f)]
1610 for f in files:
1610 for f in files:
1611 exact = match.exact(f)
1611 exact = match.exact(f)
1612 command = ["add"]
1612 command = ["add"]
1613 if exact:
1613 if exact:
1614 command.append("-f") #should be added, even if ignored
1614 command.append("-f") #should be added, even if ignored
1615 if ui.verbose or not exact:
1615 if ui.verbose or not exact:
1616 ui.status(_('adding %s\n') % uipathfn(f))
1616 ui.status(_('adding %s\n') % uipathfn(f))
1617
1617
1618 if f in tracked: # hg prints 'adding' even if already tracked
1618 if f in tracked: # hg prints 'adding' even if already tracked
1619 if exact:
1619 if exact:
1620 rejected.append(f)
1620 rejected.append(f)
1621 continue
1621 continue
1622 if not opts.get(r'dry_run'):
1622 if not opts.get(r'dry_run'):
1623 self._gitcommand(command + [f])
1623 self._gitcommand(command + [f])
1624
1624
1625 for f in rejected:
1625 for f in rejected:
1626 ui.warn(_("%s already tracked!\n") % uipathfn(f))
1626 ui.warn(_("%s already tracked!\n") % uipathfn(f))
1627
1627
1628 return rejected
1628 return rejected
1629
1629
1630 @annotatesubrepoerror
1630 @annotatesubrepoerror
1631 def remove(self):
1631 def remove(self):
1632 if self._gitmissing():
1632 if self._gitmissing():
1633 return
1633 return
1634 if self.dirty():
1634 if self.dirty():
1635 self.ui.warn(_('not removing repo %s because '
1635 self.ui.warn(_('not removing repo %s because '
1636 'it has changes.\n') % self._relpath)
1636 'it has changes.\n') % self._relpath)
1637 return
1637 return
1638 # we can't fully delete the repository as it may contain
1638 # we can't fully delete the repository as it may contain
1639 # local-only history
1639 # local-only history
1640 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1640 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1641 self._gitcommand(['config', 'core.bare', 'true'])
1641 self._gitcommand(['config', 'core.bare', 'true'])
1642 for f, kind in self.wvfs.readdir():
1642 for f, kind in self.wvfs.readdir():
1643 if f == '.git':
1643 if f == '.git':
1644 continue
1644 continue
1645 if kind == stat.S_IFDIR:
1645 if kind == stat.S_IFDIR:
1646 self.wvfs.rmtree(f)
1646 self.wvfs.rmtree(f)
1647 else:
1647 else:
1648 self.wvfs.unlink(f)
1648 self.wvfs.unlink(f)
1649
1649
1650 def archive(self, archiver, prefix, match=None, decode=True):
1650 def archive(self, archiver, prefix, match=None, decode=True):
1651 total = 0
1651 total = 0
1652 source, revision = self._state
1652 source, revision = self._state
1653 if not revision:
1653 if not revision:
1654 return total
1654 return total
1655 self._fetch(source, revision)
1655 self._fetch(source, revision)
1656
1656
1657 # Parse git's native archive command.
1657 # Parse git's native archive command.
1658 # This should be much faster than manually traversing the trees
1658 # This should be much faster than manually traversing the trees
1659 # and objects with many subprocess calls.
1659 # and objects with many subprocess calls.
1660 tarstream = self._gitcommand(['archive', revision], stream=True)
1660 tarstream = self._gitcommand(['archive', revision], stream=True)
1661 tar = tarfile.open(fileobj=tarstream, mode=r'r|')
1661 tar = tarfile.open(fileobj=tarstream, mode=r'r|')
1662 relpath = subrelpath(self)
1662 relpath = subrelpath(self)
1663 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
1663 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
1664 unit=_('files'))
1664 unit=_('files'))
1665 progress.update(0)
1665 progress.update(0)
1666 for info in tar:
1666 for info in tar:
1667 if info.isdir():
1667 if info.isdir():
1668 continue
1668 continue
1669 bname = pycompat.fsencode(info.name)
1669 bname = pycompat.fsencode(info.name)
1670 if match and not match(bname):
1670 if match and not match(bname):
1671 continue
1671 continue
1672 if info.issym():
1672 if info.issym():
1673 data = info.linkname
1673 data = info.linkname
1674 else:
1674 else:
1675 data = tar.extractfile(info).read()
1675 data = tar.extractfile(info).read()
1676 archiver.addfile(prefix + bname, info.mode, info.issym(), data)
1676 archiver.addfile(prefix + bname, info.mode, info.issym(), data)
1677 total += 1
1677 total += 1
1678 progress.increment()
1678 progress.increment()
1679 progress.complete()
1679 progress.complete()
1680 return total
1680 return total
1681
1681
1682
1682
1683 @annotatesubrepoerror
1683 @annotatesubrepoerror
1684 def cat(self, match, fm, fntemplate, prefix, **opts):
1684 def cat(self, match, fm, fntemplate, prefix, **opts):
1685 rev = self._state[1]
1685 rev = self._state[1]
1686 if match.anypats():
1686 if match.anypats():
1687 return 1 #No support for include/exclude yet
1687 return 1 #No support for include/exclude yet
1688
1688
1689 if not match.files():
1689 if not match.files():
1690 return 1
1690 return 1
1691
1691
1692 # TODO: add support for non-plain formatter (see cmdutil.cat())
1692 # TODO: add support for non-plain formatter (see cmdutil.cat())
1693 for f in match.files():
1693 for f in match.files():
1694 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1694 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1695 fp = cmdutil.makefileobj(self._ctx, fntemplate,
1695 fp = cmdutil.makefileobj(self._ctx, fntemplate,
1696 pathname=self.wvfs.reljoin(prefix, f))
1696 pathname=self.wvfs.reljoin(prefix, f))
1697 fp.write(output)
1697 fp.write(output)
1698 fp.close()
1698 fp.close()
1699 return 0
1699 return 0
1700
1700
1701
1701
1702 @annotatesubrepoerror
1702 @annotatesubrepoerror
1703 def status(self, rev2, **opts):
1703 def status(self, rev2, **opts):
1704 rev1 = self._state[1]
1704 rev1 = self._state[1]
1705 if self._gitmissing() or not rev1:
1705 if self._gitmissing() or not rev1:
1706 # if the repo is missing, return no results
1706 # if the repo is missing, return no results
1707 return scmutil.status([], [], [], [], [], [], [])
1707 return scmutil.status([], [], [], [], [], [], [])
1708 modified, added, removed = [], [], []
1708 modified, added, removed = [], [], []
1709 self._gitupdatestat()
1709 self._gitupdatestat()
1710 if rev2:
1710 if rev2:
1711 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1711 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1712 else:
1712 else:
1713 command = ['diff-index', '--no-renames', rev1]
1713 command = ['diff-index', '--no-renames', rev1]
1714 out = self._gitcommand(command)
1714 out = self._gitcommand(command)
1715 for line in out.split('\n'):
1715 for line in out.split('\n'):
1716 tab = line.find('\t')
1716 tab = line.find('\t')
1717 if tab == -1:
1717 if tab == -1:
1718 continue
1718 continue
1719 status, f = line[tab - 1:tab], line[tab + 1:]
1719 status, f = line[tab - 1:tab], line[tab + 1:]
1720 if status == 'M':
1720 if status == 'M':
1721 modified.append(f)
1721 modified.append(f)
1722 elif status == 'A':
1722 elif status == 'A':
1723 added.append(f)
1723 added.append(f)
1724 elif status == 'D':
1724 elif status == 'D':
1725 removed.append(f)
1725 removed.append(f)
1726
1726
1727 deleted, unknown, ignored, clean = [], [], [], []
1727 deleted, unknown, ignored, clean = [], [], [], []
1728
1728
1729 command = ['status', '--porcelain', '-z']
1729 command = ['status', '--porcelain', '-z']
1730 if opts.get(r'unknown'):
1730 if opts.get(r'unknown'):
1731 command += ['--untracked-files=all']
1731 command += ['--untracked-files=all']
1732 if opts.get(r'ignored'):
1732 if opts.get(r'ignored'):
1733 command += ['--ignored']
1733 command += ['--ignored']
1734 out = self._gitcommand(command)
1734 out = self._gitcommand(command)
1735
1735
1736 changedfiles = set()
1736 changedfiles = set()
1737 changedfiles.update(modified)
1737 changedfiles.update(modified)
1738 changedfiles.update(added)
1738 changedfiles.update(added)
1739 changedfiles.update(removed)
1739 changedfiles.update(removed)
1740 for line in out.split('\0'):
1740 for line in out.split('\0'):
1741 if not line:
1741 if not line:
1742 continue
1742 continue
1743 st = line[0:2]
1743 st = line[0:2]
1744 #moves and copies show 2 files on one line
1744 #moves and copies show 2 files on one line
1745 if line.find('\0') >= 0:
1745 if line.find('\0') >= 0:
1746 filename1, filename2 = line[3:].split('\0')
1746 filename1, filename2 = line[3:].split('\0')
1747 else:
1747 else:
1748 filename1 = line[3:]
1748 filename1 = line[3:]
1749 filename2 = None
1749 filename2 = None
1750
1750
1751 changedfiles.add(filename1)
1751 changedfiles.add(filename1)
1752 if filename2:
1752 if filename2:
1753 changedfiles.add(filename2)
1753 changedfiles.add(filename2)
1754
1754
1755 if st == '??':
1755 if st == '??':
1756 unknown.append(filename1)
1756 unknown.append(filename1)
1757 elif st == '!!':
1757 elif st == '!!':
1758 ignored.append(filename1)
1758 ignored.append(filename1)
1759
1759
1760 if opts.get(r'clean'):
1760 if opts.get(r'clean'):
1761 out = self._gitcommand(['ls-files'])
1761 out = self._gitcommand(['ls-files'])
1762 for f in out.split('\n'):
1762 for f in out.split('\n'):
1763 if not f in changedfiles:
1763 if not f in changedfiles:
1764 clean.append(f)
1764 clean.append(f)
1765
1765
1766 return scmutil.status(modified, added, removed, deleted,
1766 return scmutil.status(modified, added, removed, deleted,
1767 unknown, ignored, clean)
1767 unknown, ignored, clean)
1768
1768
1769 @annotatesubrepoerror
1769 @annotatesubrepoerror
1770 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1770 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1771 node1 = self._state[1]
1771 node1 = self._state[1]
1772 cmd = ['diff', '--no-renames']
1772 cmd = ['diff', '--no-renames']
1773 if opts[r'stat']:
1773 if opts[r'stat']:
1774 cmd.append('--stat')
1774 cmd.append('--stat')
1775 else:
1775 else:
1776 # for Git, this also implies '-p'
1776 # for Git, this also implies '-p'
1777 cmd.append('-U%d' % diffopts.context)
1777 cmd.append('-U%d' % diffopts.context)
1778
1778
1779 if diffopts.noprefix:
1779 if diffopts.noprefix:
1780 cmd.extend(['--src-prefix=%s/' % prefix,
1780 cmd.extend(['--src-prefix=%s/' % prefix,
1781 '--dst-prefix=%s/' % prefix])
1781 '--dst-prefix=%s/' % prefix])
1782 else:
1782 else:
1783 cmd.extend(['--src-prefix=a/%s/' % prefix,
1783 cmd.extend(['--src-prefix=a/%s/' % prefix,
1784 '--dst-prefix=b/%s/' % prefix])
1784 '--dst-prefix=b/%s/' % prefix])
1785
1785
1786 if diffopts.ignorews:
1786 if diffopts.ignorews:
1787 cmd.append('--ignore-all-space')
1787 cmd.append('--ignore-all-space')
1788 if diffopts.ignorewsamount:
1788 if diffopts.ignorewsamount:
1789 cmd.append('--ignore-space-change')
1789 cmd.append('--ignore-space-change')
1790 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1790 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1791 and diffopts.ignoreblanklines:
1791 and diffopts.ignoreblanklines:
1792 cmd.append('--ignore-blank-lines')
1792 cmd.append('--ignore-blank-lines')
1793
1793
1794 cmd.append(node1)
1794 cmd.append(node1)
1795 if node2:
1795 if node2:
1796 cmd.append(node2)
1796 cmd.append(node2)
1797
1797
1798 output = ""
1798 output = ""
1799 if match.always():
1799 if match.always():
1800 output += self._gitcommand(cmd) + '\n'
1800 output += self._gitcommand(cmd) + '\n'
1801 else:
1801 else:
1802 st = self.status(node2)[:3]
1802 st = self.status(node2)[:3]
1803 files = [f for sublist in st for f in sublist]
1803 files = [f for sublist in st for f in sublist]
1804 for f in files:
1804 for f in files:
1805 if match(f):
1805 if match(f):
1806 output += self._gitcommand(cmd + ['--', f]) + '\n'
1806 output += self._gitcommand(cmd + ['--', f]) + '\n'
1807
1807
1808 if output.strip():
1808 if output.strip():
1809 ui.write(output)
1809 ui.write(output)
1810
1810
1811 @annotatesubrepoerror
1811 @annotatesubrepoerror
1812 def revert(self, substate, *pats, **opts):
1812 def revert(self, substate, *pats, **opts):
1813 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1813 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1814 if not opts.get(r'no_backup'):
1814 if not opts.get(r'no_backup'):
1815 status = self.status(None)
1815 status = self.status(None)
1816 names = status.modified
1816 names = status.modified
1817 for name in names:
1817 for name in names:
1818 # backuppath() expects a path relative to the parent repo (the
1818 # backuppath() expects a path relative to the parent repo (the
1819 # repo that ui.origbackuppath is relative to)
1819 # repo that ui.origbackuppath is relative to)
1820 parentname = os.path.join(self._path, name)
1820 parentname = os.path.join(self._path, name)
1821 bakname = scmutil.backuppath(self.ui, self._subparent,
1821 bakname = scmutil.backuppath(self.ui, self._subparent,
1822 parentname)
1822 parentname)
1823 self.ui.note(_('saving current version of %s as %s\n') %
1823 self.ui.note(_('saving current version of %s as %s\n') %
1824 (name, os.path.relpath(bakname)))
1824 (name, os.path.relpath(bakname)))
1825 util.rename(self.wvfs.join(name), bakname)
1825 util.rename(self.wvfs.join(name), bakname)
1826
1826
1827 if not opts.get(r'dry_run'):
1827 if not opts.get(r'dry_run'):
1828 self.get(substate, overwrite=True)
1828 self.get(substate, overwrite=True)
1829 return []
1829 return []
1830
1830
1831 def shortid(self, revid):
1831 def shortid(self, revid):
1832 return revid[:7]
1832 return revid[:7]
1833
1833
1834 types = {
1834 types = {
1835 'hg': hgsubrepo,
1835 'hg': hgsubrepo,
1836 'svn': svnsubrepo,
1836 'svn': svnsubrepo,
1837 'git': gitsubrepo,
1837 'git': gitsubrepo,
1838 }
1838 }
@@ -1,556 +1,556 b''
1 #require no-reposimplestore no-chg
1 #require no-reposimplestore no-chg
2
2
3 Set up a server
3 Set up a server
4
4
5 $ hg init server
5 $ hg init server
6 $ cd server
6 $ cd server
7 $ cat >> .hg/hgrc << EOF
7 $ cat >> .hg/hgrc << EOF
8 > [extensions]
8 > [extensions]
9 > clonebundles =
9 > clonebundles =
10 > EOF
10 > EOF
11
11
12 $ touch foo
12 $ touch foo
13 $ hg -q commit -A -m 'add foo'
13 $ hg -q commit -A -m 'add foo'
14 $ touch bar
14 $ touch bar
15 $ hg -q commit -A -m 'add bar'
15 $ hg -q commit -A -m 'add bar'
16
16
17 $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
17 $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
18 $ cat hg.pid >> $DAEMON_PIDS
18 $ cat hg.pid >> $DAEMON_PIDS
19 $ cd ..
19 $ cd ..
20
20
21 Missing manifest should not result in server lookup
21 Missing manifest should not result in server lookup
22
22
23 $ hg --verbose clone -U http://localhost:$HGPORT no-manifest
23 $ hg --verbose clone -U http://localhost:$HGPORT no-manifest
24 requesting all changes
24 requesting all changes
25 adding changesets
25 adding changesets
26 adding manifests
26 adding manifests
27 adding file changes
27 adding file changes
28 added 2 changesets with 2 changes to 2 files
28 added 2 changesets with 2 changes to 2 files
29 new changesets 53245c60e682:aaff8d2ffbbf
29 new changesets 53245c60e682:aaff8d2ffbbf
30 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
30 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
31
31
32 $ cat server/access.log
32 $ cat server/access.log
33 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
33 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
34 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
34 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
35 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
35 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
36
36
37 Empty manifest file results in retrieval
37 Empty manifest file results in retrieval
38 (the extension only checks if the manifest file exists)
38 (the extension only checks if the manifest file exists)
39
39
40 $ touch server/.hg/clonebundles.manifest
40 $ touch server/.hg/clonebundles.manifest
41 $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest
41 $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest
42 no clone bundles available on remote; falling back to regular clone
42 no clone bundles available on remote; falling back to regular clone
43 requesting all changes
43 requesting all changes
44 adding changesets
44 adding changesets
45 adding manifests
45 adding manifests
46 adding file changes
46 adding file changes
47 added 2 changesets with 2 changes to 2 files
47 added 2 changesets with 2 changes to 2 files
48 new changesets 53245c60e682:aaff8d2ffbbf
48 new changesets 53245c60e682:aaff8d2ffbbf
49 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
49 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
50
50
51 Manifest file with invalid URL aborts
51 Manifest file with invalid URL aborts
52
52
53 $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
53 $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
54 $ hg clone http://localhost:$HGPORT 404-url
54 $ hg clone http://localhost:$HGPORT 404-url
55 applying clone bundle from http://does.not.exist/bundle.hg
55 applying clone bundle from http://does.not.exist/bundle.hg
56 error fetching bundle: (.* not known|(\[Errno -?\d+])? No address associated with hostname) (re) (no-windows !)
56 error fetching bundle: (.* not known|(\[Errno -?\d+])? No address associated with hostname) (re) (no-windows !)
57 error fetching bundle: [Errno 1100*] getaddrinfo failed (glob) (windows !)
57 error fetching bundle: [Errno 1100*] getaddrinfo failed (glob) (windows !)
58 abort: error applying bundle
58 abort: error applying bundle
59 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
59 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
60 [255]
60 [255]
61
61
62 Server is not running aborts
62 Server is not running aborts
63
63
64 $ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
64 $ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
65 $ hg clone http://localhost:$HGPORT server-not-runner
65 $ hg clone http://localhost:$HGPORT server-not-runner
66 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
66 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
67 error fetching bundle: (.* refused.*|Protocol not supported|(.* )?\$EADDRNOTAVAIL\$) (re)
67 error fetching bundle: (.* refused.*|Protocol not supported|(.* )?\$EADDRNOTAVAIL\$|.* No route to host) (re)
68 abort: error applying bundle
68 abort: error applying bundle
69 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
69 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
70 [255]
70 [255]
71
71
72 Server returns 404
72 Server returns 404
73
73
74 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
74 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
75 $ cat http.pid >> $DAEMON_PIDS
75 $ cat http.pid >> $DAEMON_PIDS
76 $ hg clone http://localhost:$HGPORT running-404
76 $ hg clone http://localhost:$HGPORT running-404
77 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
77 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
78 HTTP error fetching bundle: HTTP Error 404: File not found
78 HTTP error fetching bundle: HTTP Error 404: File not found
79 abort: error applying bundle
79 abort: error applying bundle
80 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
80 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
81 [255]
81 [255]
82
82
83 We can override failure to fall back to regular clone
83 We can override failure to fall back to regular clone
84
84
85 $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback
85 $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback
86 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
86 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
87 HTTP error fetching bundle: HTTP Error 404: File not found
87 HTTP error fetching bundle: HTTP Error 404: File not found
88 falling back to normal clone
88 falling back to normal clone
89 requesting all changes
89 requesting all changes
90 adding changesets
90 adding changesets
91 adding manifests
91 adding manifests
92 adding file changes
92 adding file changes
93 added 2 changesets with 2 changes to 2 files
93 added 2 changesets with 2 changes to 2 files
94 new changesets 53245c60e682:aaff8d2ffbbf
94 new changesets 53245c60e682:aaff8d2ffbbf
95
95
96 Bundle with partial content works
96 Bundle with partial content works
97
97
98 $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg
98 $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg
99 1 changesets found
99 1 changesets found
100
100
101 We verify exact bundle content as an extra check against accidental future
101 We verify exact bundle content as an extra check against accidental future
102 changes. If this output changes, we could break old clients.
102 changes. If this output changes, we could break old clients.
103
103
104 $ f --size --hexdump partial.hg
104 $ f --size --hexdump partial.hg
105 partial.hg: size=207
105 partial.hg: size=207
106 0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....|
106 0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....|
107 0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!|
107 0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!|
108 0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.|
108 0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.|
109 0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2|
109 0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2|
110 0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...|
110 0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...|
111 0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1|
111 0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1|
112 0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...|
112 0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...|
113 0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u|
113 0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u|
114 0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.|
114 0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.|
115 0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$|
115 0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$|
116 00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.|
116 00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.|
117 00b0: 16 b2 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..|
117 00b0: 16 b2 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..|
118 00c0: 78 ed fc d5 76 f1 36 35 dc 05 00 36 ed 5e c7 |x...v.65...6.^.|
118 00c0: 78 ed fc d5 76 f1 36 35 dc 05 00 36 ed 5e c7 |x...v.65...6.^.|
119
119
120 $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest
120 $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest
121 $ hg clone -U http://localhost:$HGPORT partial-bundle
121 $ hg clone -U http://localhost:$HGPORT partial-bundle
122 applying clone bundle from http://localhost:$HGPORT1/partial.hg
122 applying clone bundle from http://localhost:$HGPORT1/partial.hg
123 adding changesets
123 adding changesets
124 adding manifests
124 adding manifests
125 adding file changes
125 adding file changes
126 added 1 changesets with 1 changes to 1 files
126 added 1 changesets with 1 changes to 1 files
127 finished applying clone bundle
127 finished applying clone bundle
128 searching for changes
128 searching for changes
129 adding changesets
129 adding changesets
130 adding manifests
130 adding manifests
131 adding file changes
131 adding file changes
132 added 1 changesets with 1 changes to 1 files
132 added 1 changesets with 1 changes to 1 files
133 new changesets aaff8d2ffbbf
133 new changesets aaff8d2ffbbf
134 1 local changesets published
134 1 local changesets published
135
135
136 Incremental pull doesn't fetch bundle
136 Incremental pull doesn't fetch bundle
137
137
138 $ hg clone -r 53245c60e682 -U http://localhost:$HGPORT partial-clone
138 $ hg clone -r 53245c60e682 -U http://localhost:$HGPORT partial-clone
139 adding changesets
139 adding changesets
140 adding manifests
140 adding manifests
141 adding file changes
141 adding file changes
142 added 1 changesets with 1 changes to 1 files
142 added 1 changesets with 1 changes to 1 files
143 new changesets 53245c60e682
143 new changesets 53245c60e682
144
144
145 $ cd partial-clone
145 $ cd partial-clone
146 $ hg pull
146 $ hg pull
147 pulling from http://localhost:$HGPORT/
147 pulling from http://localhost:$HGPORT/
148 searching for changes
148 searching for changes
149 adding changesets
149 adding changesets
150 adding manifests
150 adding manifests
151 adding file changes
151 adding file changes
152 added 1 changesets with 1 changes to 1 files
152 added 1 changesets with 1 changes to 1 files
153 new changesets aaff8d2ffbbf
153 new changesets aaff8d2ffbbf
154 (run 'hg update' to get a working copy)
154 (run 'hg update' to get a working copy)
155 $ cd ..
155 $ cd ..
156
156
157 Bundle with full content works
157 Bundle with full content works
158
158
159 $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg
159 $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg
160 2 changesets found
160 2 changesets found
161
161
162 Again, we perform an extra check against bundle content changes. If this content
162 Again, we perform an extra check against bundle content changes. If this content
163 changes, clone bundles produced by new Mercurial versions may not be readable
163 changes, clone bundles produced by new Mercurial versions may not be readable
164 by old clients.
164 by old clients.
165
165
166 $ f --size --hexdump full.hg
166 $ f --size --hexdump full.hg
167 full.hg: size=442
167 full.hg: size=442
168 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
168 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
169 0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 d0 e4 76 f6 70 |ion=GZx.c``..v.p|
169 0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 d0 e4 76 f6 70 |ion=GZx.c``..v.p|
170 0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 46 76 26 4e |.swu....`..FFv&N|
170 0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 46 76 26 4e |.swu....`..FFv&N|
171 0030: c6 b2 d4 a2 e2 cc fc 3c 03 a3 bc a4 e4 8c c4 bc |.......<........|
171 0030: c6 b2 d4 a2 e2 cc fc 3c 03 a3 bc a4 e4 8c c4 bc |.......<........|
172 0040: f4 d4 62 23 06 06 e6 19 40 f9 4d c1 2a 31 09 cf |..b#....@.M.*1..|
172 0040: f4 d4 62 23 06 06 e6 19 40 f9 4d c1 2a 31 09 cf |..b#....@.M.*1..|
173 0050: 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 97 17 b2 c9 |.:R.............|
173 0050: 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 97 17 b2 c9 |.:R.............|
174 0060: 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 a4 a4 1a 5b |.......%.......[|
174 0060: 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 a4 a4 1a 5b |.......%.......[|
175 0070: 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 a4 59 26 5a |X..'..Y..Y...Y&Z|
175 0070: 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 a4 59 26 5a |X..'..Y..Y...Y&Z|
176 0080: 18 9a 18 59 5a 26 1a 27 27 25 99 a6 99 1a 70 95 |...YZ&.''%....p.|
176 0080: 18 9a 18 59 5a 26 1a 27 27 25 99 a6 99 1a 70 95 |...YZ&.''%....p.|
177 0090: a4 16 97 70 19 28 18 70 a5 e5 e7 73 71 25 a6 a4 |...p.(.p...sq%..|
177 0090: a4 16 97 70 19 28 18 70 a5 e5 e7 73 71 25 a6 a4 |...p.(.p...sq%..|
178 00a0: 28 00 19 20 17 af fa df ab ff 7b 3f fb 92 dc 8b |(.. ......{?....|
178 00a0: 28 00 19 20 17 af fa df ab ff 7b 3f fb 92 dc 8b |(.. ......{?....|
179 00b0: 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 89 2f b0 99 87 |.b......=ZD./...|
179 00b0: 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 89 2f b0 99 87 |.b......=ZD./...|
180 00c0: ec e2 54 63 43 e3 b4 64 43 73 23 33 43 53 0b 63 |..TcC..dCs#3CS.c|
180 00c0: ec e2 54 63 43 e3 b4 64 43 73 23 33 43 53 0b 63 |..TcC..dCs#3CS.c|
181 00d0: d3 14 23 03 a0 fb 2c 2c 0c d3 80 1e 30 49 49 b1 |..#...,,....0II.|
181 00d0: d3 14 23 03 a0 fb 2c 2c 0c d3 80 1e 30 49 49 b1 |..#...,,....0II.|
182 00e0: 4c 4a 32 48 33 30 b0 34 42 b8 38 29 b1 08 e2 62 |LJ2H30.4B.8)...b|
182 00e0: 4c 4a 32 48 33 30 b0 34 42 b8 38 29 b1 08 e2 62 |LJ2H30.4B.8)...b|
183 00f0: 20 03 6a ca c2 2c db 2f f7 2c fa 6d fc fb 34 be | .j..,./.,.m..4.|
183 00f0: 20 03 6a ca c2 2c db 2f f7 2c fa 6d fc fb 34 be | .j..,./.,.m..4.|
184 0100: fc 5c 21 a2 39 cb 66 77 7c 00 0d c3 59 17 14 58 |.\!.9.fw|...Y..X|
184 0100: fc 5c 21 a2 39 cb 66 77 7c 00 0d c3 59 17 14 58 |.\!.9.fw|...Y..X|
185 0110: 49 16 06 29 a9 a6 29 86 c6 16 e6 a6 16 a6 26 86 |I..)..).......&.|
185 0110: 49 16 06 29 a9 a6 29 86 c6 16 e6 a6 16 a6 26 86 |I..)..).......&.|
186 0120: c9 a6 69 06 a6 46 66 a6 89 29 86 26 26 89 49 96 |..i..Ff..).&&.I.|
186 0120: c9 a6 69 06 a6 46 66 a6 89 29 86 26 26 89 49 96 |..i..Ff..).&&.I.|
187 0130: 69 89 16 66 29 86 29 49 5c 20 07 3e 16 fe 23 ae |i..f).)I\ .>..#.|
187 0130: 69 89 16 66 29 86 29 49 5c 20 07 3e 16 fe 23 ae |i..f).)I\ .>..#.|
188 0140: 26 da 1c ab 10 1f d1 f8 e3 b3 ef cd dd fc 0c 93 |&...............|
188 0140: 26 da 1c ab 10 1f d1 f8 e3 b3 ef cd dd fc 0c 93 |&...............|
189 0150: 88 75 34 36 75 04 82 55 17 14 36 a4 38 10 04 d8 |.u46u..U..6.8...|
189 0150: 88 75 34 36 75 04 82 55 17 14 36 a4 38 10 04 d8 |.u46u..U..6.8...|
190 0160: 21 01 9a b1 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 |!......E..V....R|
190 0160: 21 01 9a b1 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 |!......E..V....R|
191 0170: d7 8a 78 ed fc d5 76 f1 36 25 81 89 c7 ad ec 90 |..x...v.6%......|
191 0170: d7 8a 78 ed fc d5 76 f1 36 25 81 89 c7 ad ec 90 |..x...v.6%......|
192 0180: 54 47 75 2b 89 48 b1 b2 62 c9 89 c9 19 a9 56 45 |TGu+.H..b.....VE|
192 0180: 54 47 75 2b 89 48 b1 b2 62 c9 89 c9 19 a9 56 45 |TGu+.H..b.....VE|
193 0190: a9 65 ba 49 45 89 79 c9 19 ba 60 01 a0 14 23 58 |.e.IE.y...`...#X|
193 0190: a9 65 ba 49 45 89 79 c9 19 ba 60 01 a0 14 23 58 |.e.IE.y...`...#X|
194 01a0: 81 35 c8 7d 40 cc 04 e2 a4 a4 a6 25 96 e6 94 60 |.5.}@......%...`|
194 01a0: 81 35 c8 7d 40 cc 04 e2 a4 a4 a6 25 96 e6 94 60 |.5.}@......%...`|
195 01b0: 33 17 5f 54 00 00 d3 1b 0d 4c |3._T.....L|
195 01b0: 33 17 5f 54 00 00 d3 1b 0d 4c |3._T.....L|
196
196
197 $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
197 $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
198 $ hg clone -U http://localhost:$HGPORT full-bundle
198 $ hg clone -U http://localhost:$HGPORT full-bundle
199 applying clone bundle from http://localhost:$HGPORT1/full.hg
199 applying clone bundle from http://localhost:$HGPORT1/full.hg
200 adding changesets
200 adding changesets
201 adding manifests
201 adding manifests
202 adding file changes
202 adding file changes
203 added 2 changesets with 2 changes to 2 files
203 added 2 changesets with 2 changes to 2 files
204 finished applying clone bundle
204 finished applying clone bundle
205 searching for changes
205 searching for changes
206 no changes found
206 no changes found
207 2 local changesets published
207 2 local changesets published
208
208
209 Feature works over SSH
209 Feature works over SSH
210
210
211 $ hg clone -U -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/server ssh-full-clone
211 $ hg clone -U -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/server ssh-full-clone
212 applying clone bundle from http://localhost:$HGPORT1/full.hg
212 applying clone bundle from http://localhost:$HGPORT1/full.hg
213 adding changesets
213 adding changesets
214 adding manifests
214 adding manifests
215 adding file changes
215 adding file changes
216 added 2 changesets with 2 changes to 2 files
216 added 2 changesets with 2 changes to 2 files
217 finished applying clone bundle
217 finished applying clone bundle
218 searching for changes
218 searching for changes
219 no changes found
219 no changes found
220 2 local changesets published
220 2 local changesets published
221
221
222 Entry with unknown BUNDLESPEC is filtered and not used
222 Entry with unknown BUNDLESPEC is filtered and not used
223
223
224 $ cat > server/.hg/clonebundles.manifest << EOF
224 $ cat > server/.hg/clonebundles.manifest << EOF
225 > http://bad.entry1 BUNDLESPEC=UNKNOWN
225 > http://bad.entry1 BUNDLESPEC=UNKNOWN
226 > http://bad.entry2 BUNDLESPEC=xz-v1
226 > http://bad.entry2 BUNDLESPEC=xz-v1
227 > http://bad.entry3 BUNDLESPEC=none-v100
227 > http://bad.entry3 BUNDLESPEC=none-v100
228 > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2
228 > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2
229 > EOF
229 > EOF
230
230
231 $ hg clone -U http://localhost:$HGPORT filter-unknown-type
231 $ hg clone -U http://localhost:$HGPORT filter-unknown-type
232 applying clone bundle from http://localhost:$HGPORT1/full.hg
232 applying clone bundle from http://localhost:$HGPORT1/full.hg
233 adding changesets
233 adding changesets
234 adding manifests
234 adding manifests
235 adding file changes
235 adding file changes
236 added 2 changesets with 2 changes to 2 files
236 added 2 changesets with 2 changes to 2 files
237 finished applying clone bundle
237 finished applying clone bundle
238 searching for changes
238 searching for changes
239 no changes found
239 no changes found
240 2 local changesets published
240 2 local changesets published
241
241
242 Automatic fallback when all entries are filtered
242 Automatic fallback when all entries are filtered
243
243
244 $ cat > server/.hg/clonebundles.manifest << EOF
244 $ cat > server/.hg/clonebundles.manifest << EOF
245 > http://bad.entry BUNDLESPEC=UNKNOWN
245 > http://bad.entry BUNDLESPEC=UNKNOWN
246 > EOF
246 > EOF
247
247
248 $ hg clone -U http://localhost:$HGPORT filter-all
248 $ hg clone -U http://localhost:$HGPORT filter-all
249 no compatible clone bundles available on server; falling back to regular clone
249 no compatible clone bundles available on server; falling back to regular clone
250 (you may want to report this to the server operator)
250 (you may want to report this to the server operator)
251 requesting all changes
251 requesting all changes
252 adding changesets
252 adding changesets
253 adding manifests
253 adding manifests
254 adding file changes
254 adding file changes
255 added 2 changesets with 2 changes to 2 files
255 added 2 changesets with 2 changes to 2 files
256 new changesets 53245c60e682:aaff8d2ffbbf
256 new changesets 53245c60e682:aaff8d2ffbbf
257
257
258 URLs requiring SNI are filtered in Python <2.7.9
258 URLs requiring SNI are filtered in Python <2.7.9
259
259
260 $ cp full.hg sni.hg
260 $ cp full.hg sni.hg
261 $ cat > server/.hg/clonebundles.manifest << EOF
261 $ cat > server/.hg/clonebundles.manifest << EOF
262 > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true
262 > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true
263 > http://localhost:$HGPORT1/full.hg
263 > http://localhost:$HGPORT1/full.hg
264 > EOF
264 > EOF
265
265
266 #if sslcontext
266 #if sslcontext
267 Python 2.7.9+ support SNI
267 Python 2.7.9+ support SNI
268
268
269 $ hg clone -U http://localhost:$HGPORT sni-supported
269 $ hg clone -U http://localhost:$HGPORT sni-supported
270 applying clone bundle from http://localhost:$HGPORT1/sni.hg
270 applying clone bundle from http://localhost:$HGPORT1/sni.hg
271 adding changesets
271 adding changesets
272 adding manifests
272 adding manifests
273 adding file changes
273 adding file changes
274 added 2 changesets with 2 changes to 2 files
274 added 2 changesets with 2 changes to 2 files
275 finished applying clone bundle
275 finished applying clone bundle
276 searching for changes
276 searching for changes
277 no changes found
277 no changes found
278 2 local changesets published
278 2 local changesets published
279 #else
279 #else
280 Python <2.7.9 will filter SNI URLs
280 Python <2.7.9 will filter SNI URLs
281
281
282 $ hg clone -U http://localhost:$HGPORT sni-unsupported
282 $ hg clone -U http://localhost:$HGPORT sni-unsupported
283 applying clone bundle from http://localhost:$HGPORT1/full.hg
283 applying clone bundle from http://localhost:$HGPORT1/full.hg
284 adding changesets
284 adding changesets
285 adding manifests
285 adding manifests
286 adding file changes
286 adding file changes
287 added 2 changesets with 2 changes to 2 files
287 added 2 changesets with 2 changes to 2 files
288 finished applying clone bundle
288 finished applying clone bundle
289 searching for changes
289 searching for changes
290 no changes found
290 no changes found
291 2 local changesets published
291 2 local changesets published
292 #endif
292 #endif
293
293
294 Stream clone bundles are supported
294 Stream clone bundles are supported
295
295
296 $ hg -R server debugcreatestreamclonebundle packed.hg
296 $ hg -R server debugcreatestreamclonebundle packed.hg
297 writing 613 bytes for 4 files
297 writing 613 bytes for 4 files
298 bundle requirements: generaldelta, revlogv1, sparserevlog
298 bundle requirements: generaldelta, revlogv1, sparserevlog
299
299
300 No bundle spec should work
300 No bundle spec should work
301
301
302 $ cat > server/.hg/clonebundles.manifest << EOF
302 $ cat > server/.hg/clonebundles.manifest << EOF
303 > http://localhost:$HGPORT1/packed.hg
303 > http://localhost:$HGPORT1/packed.hg
304 > EOF
304 > EOF
305
305
306 $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec
306 $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec
307 applying clone bundle from http://localhost:$HGPORT1/packed.hg
307 applying clone bundle from http://localhost:$HGPORT1/packed.hg
308 4 files to transfer, 613 bytes of data
308 4 files to transfer, 613 bytes of data
309 transferred 613 bytes in *.* seconds (*) (glob)
309 transferred 613 bytes in *.* seconds (*) (glob)
310 finished applying clone bundle
310 finished applying clone bundle
311 searching for changes
311 searching for changes
312 no changes found
312 no changes found
313
313
314 Bundle spec without parameters should work
314 Bundle spec without parameters should work
315
315
316 $ cat > server/.hg/clonebundles.manifest << EOF
316 $ cat > server/.hg/clonebundles.manifest << EOF
317 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
317 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
318 > EOF
318 > EOF
319
319
320 $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec
320 $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec
321 applying clone bundle from http://localhost:$HGPORT1/packed.hg
321 applying clone bundle from http://localhost:$HGPORT1/packed.hg
322 4 files to transfer, 613 bytes of data
322 4 files to transfer, 613 bytes of data
323 transferred 613 bytes in *.* seconds (*) (glob)
323 transferred 613 bytes in *.* seconds (*) (glob)
324 finished applying clone bundle
324 finished applying clone bundle
325 searching for changes
325 searching for changes
326 no changes found
326 no changes found
327
327
328 Bundle spec with format requirements should work
328 Bundle spec with format requirements should work
329
329
330 $ cat > server/.hg/clonebundles.manifest << EOF
330 $ cat > server/.hg/clonebundles.manifest << EOF
331 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
331 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
332 > EOF
332 > EOF
333
333
334 $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements
334 $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements
335 applying clone bundle from http://localhost:$HGPORT1/packed.hg
335 applying clone bundle from http://localhost:$HGPORT1/packed.hg
336 4 files to transfer, 613 bytes of data
336 4 files to transfer, 613 bytes of data
337 transferred 613 bytes in *.* seconds (*) (glob)
337 transferred 613 bytes in *.* seconds (*) (glob)
338 finished applying clone bundle
338 finished applying clone bundle
339 searching for changes
339 searching for changes
340 no changes found
340 no changes found
341
341
342 Stream bundle spec with unknown requirements should be filtered out
342 Stream bundle spec with unknown requirements should be filtered out
343
343
344 $ cat > server/.hg/clonebundles.manifest << EOF
344 $ cat > server/.hg/clonebundles.manifest << EOF
345 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
345 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
346 > EOF
346 > EOF
347
347
348 $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements
348 $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements
349 no compatible clone bundles available on server; falling back to regular clone
349 no compatible clone bundles available on server; falling back to regular clone
350 (you may want to report this to the server operator)
350 (you may want to report this to the server operator)
351 requesting all changes
351 requesting all changes
352 adding changesets
352 adding changesets
353 adding manifests
353 adding manifests
354 adding file changes
354 adding file changes
355 added 2 changesets with 2 changes to 2 files
355 added 2 changesets with 2 changes to 2 files
356 new changesets 53245c60e682:aaff8d2ffbbf
356 new changesets 53245c60e682:aaff8d2ffbbf
357
357
358 Set up manifest for testing preferences
358 Set up manifest for testing preferences
359 (Remember, the TYPE does not have to match reality - the URL is
359 (Remember, the TYPE does not have to match reality - the URL is
360 important)
360 important)
361
361
362 $ cp full.hg gz-a.hg
362 $ cp full.hg gz-a.hg
363 $ cp full.hg gz-b.hg
363 $ cp full.hg gz-b.hg
364 $ cp full.hg bz2-a.hg
364 $ cp full.hg bz2-a.hg
365 $ cp full.hg bz2-b.hg
365 $ cp full.hg bz2-b.hg
366 $ cat > server/.hg/clonebundles.manifest << EOF
366 $ cat > server/.hg/clonebundles.manifest << EOF
367 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a
367 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a
368 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a
368 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a
369 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
369 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
370 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
370 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
371 > EOF
371 > EOF
372
372
373 Preferring an undefined attribute will take first entry
373 Preferring an undefined attribute will take first entry
374
374
375 $ hg --config ui.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo
375 $ hg --config ui.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo
376 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
376 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
377 adding changesets
377 adding changesets
378 adding manifests
378 adding manifests
379 adding file changes
379 adding file changes
380 added 2 changesets with 2 changes to 2 files
380 added 2 changesets with 2 changes to 2 files
381 finished applying clone bundle
381 finished applying clone bundle
382 searching for changes
382 searching for changes
383 no changes found
383 no changes found
384 2 local changesets published
384 2 local changesets published
385
385
386 Preferring bz2 type will download first entry of that type
386 Preferring bz2 type will download first entry of that type
387
387
388 $ hg --config ui.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz
388 $ hg --config ui.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz
389 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
389 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
390 adding changesets
390 adding changesets
391 adding manifests
391 adding manifests
392 adding file changes
392 adding file changes
393 added 2 changesets with 2 changes to 2 files
393 added 2 changesets with 2 changes to 2 files
394 finished applying clone bundle
394 finished applying clone bundle
395 searching for changes
395 searching for changes
396 no changes found
396 no changes found
397 2 local changesets published
397 2 local changesets published
398
398
399 Preferring multiple values of an option works
399 Preferring multiple values of an option works
400
400
401 $ hg --config ui.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz
401 $ hg --config ui.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz
402 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
402 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
403 adding changesets
403 adding changesets
404 adding manifests
404 adding manifests
405 adding file changes
405 adding file changes
406 added 2 changesets with 2 changes to 2 files
406 added 2 changesets with 2 changes to 2 files
407 finished applying clone bundle
407 finished applying clone bundle
408 searching for changes
408 searching for changes
409 no changes found
409 no changes found
410 2 local changesets published
410 2 local changesets published
411
411
412 Sorting multiple values should get us back to original first entry
412 Sorting multiple values should get us back to original first entry
413
413
414 $ hg --config ui.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz
414 $ hg --config ui.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz
415 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
415 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
416 adding changesets
416 adding changesets
417 adding manifests
417 adding manifests
418 adding file changes
418 adding file changes
419 added 2 changesets with 2 changes to 2 files
419 added 2 changesets with 2 changes to 2 files
420 finished applying clone bundle
420 finished applying clone bundle
421 searching for changes
421 searching for changes
422 no changes found
422 no changes found
423 2 local changesets published
423 2 local changesets published
424
424
425 Preferring multiple attributes has correct order
425 Preferring multiple attributes has correct order
426
426
427 $ hg --config ui.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes
427 $ hg --config ui.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes
428 applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg
428 applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg
429 adding changesets
429 adding changesets
430 adding manifests
430 adding manifests
431 adding file changes
431 adding file changes
432 added 2 changesets with 2 changes to 2 files
432 added 2 changesets with 2 changes to 2 files
433 finished applying clone bundle
433 finished applying clone bundle
434 searching for changes
434 searching for changes
435 no changes found
435 no changes found
436 2 local changesets published
436 2 local changesets published
437
437
438 Test where attribute is missing from some entries
438 Test where attribute is missing from some entries
439
439
440 $ cat > server/.hg/clonebundles.manifest << EOF
440 $ cat > server/.hg/clonebundles.manifest << EOF
441 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
441 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
442 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2
442 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2
443 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
443 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
444 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
444 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
445 > EOF
445 > EOF
446
446
447 $ hg --config ui.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute
447 $ hg --config ui.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute
448 applying clone bundle from http://localhost:$HGPORT1/gz-b.hg
448 applying clone bundle from http://localhost:$HGPORT1/gz-b.hg
449 adding changesets
449 adding changesets
450 adding manifests
450 adding manifests
451 adding file changes
451 adding file changes
452 added 2 changesets with 2 changes to 2 files
452 added 2 changesets with 2 changes to 2 files
453 finished applying clone bundle
453 finished applying clone bundle
454 searching for changes
454 searching for changes
455 no changes found
455 no changes found
456 2 local changesets published
456 2 local changesets published
457
457
458 Test interaction between clone bundles and --stream
458 Test interaction between clone bundles and --stream
459
459
460 A manifest with just a gzip bundle
460 A manifest with just a gzip bundle
461
461
462 $ cat > server/.hg/clonebundles.manifest << EOF
462 $ cat > server/.hg/clonebundles.manifest << EOF
463 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
463 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
464 > EOF
464 > EOF
465
465
466 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip
466 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip
467 no compatible clone bundles available on server; falling back to regular clone
467 no compatible clone bundles available on server; falling back to regular clone
468 (you may want to report this to the server operator)
468 (you may want to report this to the server operator)
469 streaming all changes
469 streaming all changes
470 9 files to transfer, 816 bytes of data
470 9 files to transfer, 816 bytes of data
471 transferred 816 bytes in * seconds (*) (glob)
471 transferred 816 bytes in * seconds (*) (glob)
472
472
473 A manifest with a stream clone but no BUNDLESPEC
473 A manifest with a stream clone but no BUNDLESPEC
474
474
475 $ cat > server/.hg/clonebundles.manifest << EOF
475 $ cat > server/.hg/clonebundles.manifest << EOF
476 > http://localhost:$HGPORT1/packed.hg
476 > http://localhost:$HGPORT1/packed.hg
477 > EOF
477 > EOF
478
478
479 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-no-bundlespec
479 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-no-bundlespec
480 no compatible clone bundles available on server; falling back to regular clone
480 no compatible clone bundles available on server; falling back to regular clone
481 (you may want to report this to the server operator)
481 (you may want to report this to the server operator)
482 streaming all changes
482 streaming all changes
483 9 files to transfer, 816 bytes of data
483 9 files to transfer, 816 bytes of data
484 transferred 816 bytes in * seconds (*) (glob)
484 transferred 816 bytes in * seconds (*) (glob)
485
485
486 A manifest with a gzip bundle and a stream clone
486 A manifest with a gzip bundle and a stream clone
487
487
488 $ cat > server/.hg/clonebundles.manifest << EOF
488 $ cat > server/.hg/clonebundles.manifest << EOF
489 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
489 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
490 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
490 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
491 > EOF
491 > EOF
492
492
493 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed
493 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed
494 applying clone bundle from http://localhost:$HGPORT1/packed.hg
494 applying clone bundle from http://localhost:$HGPORT1/packed.hg
495 4 files to transfer, 613 bytes of data
495 4 files to transfer, 613 bytes of data
496 transferred 613 bytes in * seconds (*) (glob)
496 transferred 613 bytes in * seconds (*) (glob)
497 finished applying clone bundle
497 finished applying clone bundle
498 searching for changes
498 searching for changes
499 no changes found
499 no changes found
500
500
501 A manifest with a gzip bundle and stream clone with supported requirements
501 A manifest with a gzip bundle and stream clone with supported requirements
502
502
503 $ cat > server/.hg/clonebundles.manifest << EOF
503 $ cat > server/.hg/clonebundles.manifest << EOF
504 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
504 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
505 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
505 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
506 > EOF
506 > EOF
507
507
508 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed-requirements
508 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed-requirements
509 applying clone bundle from http://localhost:$HGPORT1/packed.hg
509 applying clone bundle from http://localhost:$HGPORT1/packed.hg
510 4 files to transfer, 613 bytes of data
510 4 files to transfer, 613 bytes of data
511 transferred 613 bytes in * seconds (*) (glob)
511 transferred 613 bytes in * seconds (*) (glob)
512 finished applying clone bundle
512 finished applying clone bundle
513 searching for changes
513 searching for changes
514 no changes found
514 no changes found
515
515
516 A manifest with a gzip bundle and a stream clone with unsupported requirements
516 A manifest with a gzip bundle and a stream clone with unsupported requirements
517
517
518 $ cat > server/.hg/clonebundles.manifest << EOF
518 $ cat > server/.hg/clonebundles.manifest << EOF
519 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
519 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
520 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
520 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
521 > EOF
521 > EOF
522
522
523 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed-unsupported-requirements
523 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed-unsupported-requirements
524 no compatible clone bundles available on server; falling back to regular clone
524 no compatible clone bundles available on server; falling back to regular clone
525 (you may want to report this to the server operator)
525 (you may want to report this to the server operator)
526 streaming all changes
526 streaming all changes
527 9 files to transfer, 816 bytes of data
527 9 files to transfer, 816 bytes of data
528 transferred 816 bytes in * seconds (*) (glob)
528 transferred 816 bytes in * seconds (*) (glob)
529
529
530 Test clone bundle retrieved through bundle2
530 Test clone bundle retrieved through bundle2
531
531
532 $ cat << EOF >> $HGRCPATH
532 $ cat << EOF >> $HGRCPATH
533 > [extensions]
533 > [extensions]
534 > largefiles=
534 > largefiles=
535 > EOF
535 > EOF
536 $ killdaemons.py
536 $ killdaemons.py
537 $ hg -R server serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
537 $ hg -R server serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
538 $ cat hg.pid >> $DAEMON_PIDS
538 $ cat hg.pid >> $DAEMON_PIDS
539
539
540 $ hg -R server debuglfput gz-a.hg
540 $ hg -R server debuglfput gz-a.hg
541 1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae
541 1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae
542
542
543 $ cat > server/.hg/clonebundles.manifest << EOF
543 $ cat > server/.hg/clonebundles.manifest << EOF
544 > largefile://1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae BUNDLESPEC=gzip-v2
544 > largefile://1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae BUNDLESPEC=gzip-v2
545 > EOF
545 > EOF
546
546
547 $ hg clone -U http://localhost:$HGPORT largefile-provided --traceback
547 $ hg clone -U http://localhost:$HGPORT largefile-provided --traceback
548 applying clone bundle from largefile://1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae
548 applying clone bundle from largefile://1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae
549 adding changesets
549 adding changesets
550 adding manifests
550 adding manifests
551 adding file changes
551 adding file changes
552 added 2 changesets with 2 changes to 2 files
552 added 2 changesets with 2 changes to 2 files
553 finished applying clone bundle
553 finished applying clone bundle
554 searching for changes
554 searching for changes
555 no changes found
555 no changes found
556 2 local changesets published
556 2 local changesets published
@@ -1,122 +1,122 b''
1 #require serve
1 #require serve
2
2
3 $ hg init a
3 $ hg init a
4 $ cd a
4 $ cd a
5 $ echo a > a
5 $ echo a > a
6 $ hg ci -Ama -d '1123456789 0'
6 $ hg ci -Ama -d '1123456789 0'
7 adding a
7 adding a
8 $ hg serve --config server.uncompressed=True -p $HGPORT -d --pid-file=hg.pid
8 $ hg serve --config server.uncompressed=True -p $HGPORT -d --pid-file=hg.pid
9 $ cat hg.pid >> $DAEMON_PIDS
9 $ cat hg.pid >> $DAEMON_PIDS
10 $ cd ..
10 $ cd ..
11 $ tinyproxy.py $HGPORT1 localhost 2>proxy.log >/dev/null </dev/null &
11 $ tinyproxy.py $HGPORT1 localhost 2>proxy.log >/dev/null </dev/null &
12 $ while [ ! -f proxy.pid ]; do sleep 0; done
12 $ while [ ! -f proxy.pid ]; do sleep 0; done
13 $ cat proxy.pid >> $DAEMON_PIDS
13 $ cat proxy.pid >> $DAEMON_PIDS
14
14
15 url for proxy, stream
15 url for proxy, stream
16
16
17 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --stream http://localhost:$HGPORT/ b
17 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --stream http://localhost:$HGPORT/ b
18 streaming all changes
18 streaming all changes
19 6 files to transfer, 412 bytes of data (reporevlogstore !)
19 6 files to transfer, 412 bytes of data (reporevlogstore !)
20 4 files to transfer, 330 bytes of data (reposimplestore !)
20 4 files to transfer, 330 bytes of data (reposimplestore !)
21 transferred * bytes in * seconds (*/sec) (glob)
21 transferred * bytes in * seconds (*/sec) (glob)
22 updating to branch default
22 updating to branch default
23 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
23 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
24 $ cd b
24 $ cd b
25 $ hg verify
25 $ hg verify
26 checking changesets
26 checking changesets
27 checking manifests
27 checking manifests
28 crosschecking files in changesets and manifests
28 crosschecking files in changesets and manifests
29 checking files
29 checking files
30 checked 1 changesets with 1 changes to 1 files
30 checked 1 changesets with 1 changes to 1 files
31 $ cd ..
31 $ cd ..
32
32
33 url for proxy, pull
33 url for proxy, pull
34
34
35 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
35 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
36 requesting all changes
36 requesting all changes
37 adding changesets
37 adding changesets
38 adding manifests
38 adding manifests
39 adding file changes
39 adding file changes
40 added 1 changesets with 1 changes to 1 files
40 added 1 changesets with 1 changes to 1 files
41 new changesets 83180e7845de
41 new changesets 83180e7845de
42 updating to branch default
42 updating to branch default
43 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
43 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
44 $ cd b-pull
44 $ cd b-pull
45 $ hg verify
45 $ hg verify
46 checking changesets
46 checking changesets
47 checking manifests
47 checking manifests
48 crosschecking files in changesets and manifests
48 crosschecking files in changesets and manifests
49 checking files
49 checking files
50 checked 1 changesets with 1 changes to 1 files
50 checked 1 changesets with 1 changes to 1 files
51 $ cd ..
51 $ cd ..
52
52
53 host:port for proxy
53 host:port for proxy
54
54
55 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
55 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
56 requesting all changes
56 requesting all changes
57 adding changesets
57 adding changesets
58 adding manifests
58 adding manifests
59 adding file changes
59 adding file changes
60 added 1 changesets with 1 changes to 1 files
60 added 1 changesets with 1 changes to 1 files
61 new changesets 83180e7845de
61 new changesets 83180e7845de
62 updating to branch default
62 updating to branch default
63 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
63 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
64
64
65 proxy url with user name and password
65 proxy url with user name and password
66
66
67 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
67 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
68 requesting all changes
68 requesting all changes
69 adding changesets
69 adding changesets
70 adding manifests
70 adding manifests
71 adding file changes
71 adding file changes
72 added 1 changesets with 1 changes to 1 files
72 added 1 changesets with 1 changes to 1 files
73 new changesets 83180e7845de
73 new changesets 83180e7845de
74 updating to branch default
74 updating to branch default
75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76
76
77 url with user name and password
77 url with user name and password
78
78
79 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
79 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
80 requesting all changes
80 requesting all changes
81 adding changesets
81 adding changesets
82 adding manifests
82 adding manifests
83 adding file changes
83 adding file changes
84 added 1 changesets with 1 changes to 1 files
84 added 1 changesets with 1 changes to 1 files
85 new changesets 83180e7845de
85 new changesets 83180e7845de
86 updating to branch default
86 updating to branch default
87 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
87 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
88
88
89 bad host:port for proxy ("Protocol not supported" can happen on
89 bad host:port for proxy ("Protocol not supported" can happen on
90 misconfigured hosts)
90 misconfigured hosts)
91
91
92 $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
92 $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
93 abort: error: (Connection refused|Protocol not supported|.* actively refused it|\$EADDRNOTAVAIL\$) (re)
93 abort: error: (Connection refused|Protocol not supported|.* actively refused it|\$EADDRNOTAVAIL\$|No route to host) (re)
94 [255]
94 [255]
95
95
96 do not use the proxy if it is in the no list
96 do not use the proxy if it is in the no list
97
97
98 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g
98 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g
99 requesting all changes
99 requesting all changes
100 adding changesets
100 adding changesets
101 adding manifests
101 adding manifests
102 adding file changes
102 adding file changes
103 added 1 changesets with 1 changes to 1 files
103 added 1 changesets with 1 changes to 1 files
104 new changesets 83180e7845de
104 new changesets 83180e7845de
105 updating to branch default
105 updating to branch default
106 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
106 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
107 $ cat proxy.log
107 $ cat proxy.log
108 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
108 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
109 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
109 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
110 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&stream=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
110 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&stream=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
111 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
111 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
112 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
112 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
113 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
113 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
114 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
114 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
115 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
115 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
116 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
116 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
117 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
117 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
118 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
118 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
119 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
119 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
120 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
120 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
121 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
121 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
122 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
122 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
@@ -1,505 +1,506 b''
1 #require serve no-reposimplestore no-chg
1 #require serve no-reposimplestore no-chg
2
2
3 $ cat >> $HGRCPATH <<EOF
3 $ cat >> $HGRCPATH <<EOF
4 > [extensions]
4 > [extensions]
5 > lfs=
5 > lfs=
6 > [lfs]
6 > [lfs]
7 > track=all()
7 > track=all()
8 > [web]
8 > [web]
9 > push_ssl = False
9 > push_ssl = False
10 > allow-push = *
10 > allow-push = *
11 > EOF
11 > EOF
12
12
13 Serving LFS files can experimentally be turned off. The long term solution is
13 Serving LFS files can experimentally be turned off. The long term solution is
14 to support the 'verify' action in both client and server, so that the server can
14 to support the 'verify' action in both client and server, so that the server can
15 tell the client to store files elsewhere.
15 tell the client to store files elsewhere.
16
16
17 $ hg init server
17 $ hg init server
18 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
18 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
19 > --config experimental.lfs.serve=False -R server serve -d \
19 > --config experimental.lfs.serve=False -R server serve -d \
20 > -p $HGPORT --pid-file=hg.pid -A $TESTTMP/access.log -E $TESTTMP/errors.log
20 > -p $HGPORT --pid-file=hg.pid -A $TESTTMP/access.log -E $TESTTMP/errors.log
21 $ cat hg.pid >> $DAEMON_PIDS
21 $ cat hg.pid >> $DAEMON_PIDS
22
22
23 Uploads fail...
23 Uploads fail...
24
24
25 $ hg init client
25 $ hg init client
26 $ echo 'this-is-an-lfs-file' > client/lfs.bin
26 $ echo 'this-is-an-lfs-file' > client/lfs.bin
27 $ hg -R client ci -Am 'initial commit'
27 $ hg -R client ci -Am 'initial commit'
28 adding lfs.bin
28 adding lfs.bin
29 $ hg -R client push http://localhost:$HGPORT
29 $ hg -R client push http://localhost:$HGPORT
30 pushing to http://localhost:$HGPORT/
30 pushing to http://localhost:$HGPORT/
31 searching for changes
31 searching for changes
32 abort: LFS HTTP error: HTTP Error 400: no such method: .git!
32 abort: LFS HTTP error: HTTP Error 400: no such method: .git!
33 (check that lfs serving is enabled on http://localhost:$HGPORT/.git/info/lfs and "upload" is supported)
33 (check that lfs serving is enabled on http://localhost:$HGPORT/.git/info/lfs and "upload" is supported)
34 [255]
34 [255]
35
35
36 ... so do a local push to make the data available. Remove the blob from the
36 ... so do a local push to make the data available. Remove the blob from the
37 default cache, so it attempts to download.
37 default cache, so it attempts to download.
38 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
38 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
39 > --config "lfs.url=null://" \
39 > --config "lfs.url=null://" \
40 > -R client push -q server
40 > -R client push -q server
41 $ mv `hg config lfs.usercache` $TESTTMP/servercache
41 $ mv `hg config lfs.usercache` $TESTTMP/servercache
42
42
43 Downloads fail...
43 Downloads fail...
44
44
45 $ hg clone http://localhost:$HGPORT httpclone
45 $ hg clone http://localhost:$HGPORT httpclone
46 (remote is using large file support (lfs); lfs will be enabled for this repository)
46 (remote is using large file support (lfs); lfs will be enabled for this repository)
47 requesting all changes
47 requesting all changes
48 adding changesets
48 adding changesets
49 adding manifests
49 adding manifests
50 adding file changes
50 adding file changes
51 added 1 changesets with 1 changes to 1 files
51 added 1 changesets with 1 changes to 1 files
52 new changesets 525251863cad
52 new changesets 525251863cad
53 updating to branch default
53 updating to branch default
54 abort: LFS HTTP error: HTTP Error 400: no such method: .git!
54 abort: LFS HTTP error: HTTP Error 400: no such method: .git!
55 (check that lfs serving is enabled on http://localhost:$HGPORT/.git/info/lfs and "download" is supported)
55 (check that lfs serving is enabled on http://localhost:$HGPORT/.git/info/lfs and "download" is supported)
56 [255]
56 [255]
57
57
58 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
58 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
59
59
60 $ cat $TESTTMP/access.log $TESTTMP/errors.log
60 $ cat $TESTTMP/access.log $TESTTMP/errors.log
61 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
61 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
62 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
62 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
63 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
63 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
64 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
64 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
65 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
65 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
66 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
66 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
67 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
67 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
68 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
68 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
69 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
69 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
70
70
71 $ rm -f $TESTTMP/access.log $TESTTMP/errors.log
71 $ rm -f $TESTTMP/access.log $TESTTMP/errors.log
72 $ hg --config "lfs.usercache=$TESTTMP/servercache" -R server serve -d \
72 $ hg --config "lfs.usercache=$TESTTMP/servercache" -R server serve -d \
73 > -p $HGPORT --pid-file=hg.pid --prefix=subdir/mount/point \
73 > -p $HGPORT --pid-file=hg.pid --prefix=subdir/mount/point \
74 > -A $TESTTMP/access.log -E $TESTTMP/errors.log
74 > -A $TESTTMP/access.log -E $TESTTMP/errors.log
75 $ cat hg.pid >> $DAEMON_PIDS
75 $ cat hg.pid >> $DAEMON_PIDS
76
76
77 Reasonable hint for a misconfigured blob server
77 Reasonable hint for a misconfigured blob server
78
78
79 $ hg -R httpclone update default --config lfs.url=http://localhost:$HGPORT/missing
79 $ hg -R httpclone update default --config lfs.url=http://localhost:$HGPORT/missing
80 abort: LFS HTTP error: HTTP Error 404: Not Found!
80 abort: LFS HTTP error: HTTP Error 404: Not Found!
81 (the "lfs.url" config may be used to override http://localhost:$HGPORT/missing)
81 (the "lfs.url" config may be used to override http://localhost:$HGPORT/missing)
82 [255]
82 [255]
83
83
84 $ hg -R httpclone update default --config lfs.url=http://localhost:$HGPORT2/missing
84 $ hg -R httpclone update default --config lfs.url=http://localhost:$HGPORT2/missing
85 abort: LFS error: *onnection *refused*! (glob) (?)
85 abort: LFS error: *onnection *refused*! (glob) (?)
86 abort: LFS error: $EADDRNOTAVAIL$! (glob) (?)
86 abort: LFS error: $EADDRNOTAVAIL$! (glob) (?)
87 abort: LFS error: No route to host! (?)
87 (the "lfs.url" config may be used to override http://localhost:$HGPORT2/missing)
88 (the "lfs.url" config may be used to override http://localhost:$HGPORT2/missing)
88 [255]
89 [255]
89
90
90 Blob URIs are correct when --prefix is used
91 Blob URIs are correct when --prefix is used
91
92
92 $ hg clone --debug http://localhost:$HGPORT/subdir/mount/point cloned2
93 $ hg clone --debug http://localhost:$HGPORT/subdir/mount/point cloned2
93 using http://localhost:$HGPORT/subdir/mount/point
94 using http://localhost:$HGPORT/subdir/mount/point
94 sending capabilities command
95 sending capabilities command
95 (remote is using large file support (lfs); lfs will be enabled for this repository)
96 (remote is using large file support (lfs); lfs will be enabled for this repository)
96 query 1; heads
97 query 1; heads
97 sending batch command
98 sending batch command
98 requesting all changes
99 requesting all changes
99 sending getbundle command
100 sending getbundle command
100 bundle2-input-bundle: with-transaction
101 bundle2-input-bundle: with-transaction
101 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
102 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
102 adding changesets
103 adding changesets
103 add changeset 525251863cad
104 add changeset 525251863cad
104 adding manifests
105 adding manifests
105 adding file changes
106 adding file changes
106 adding lfs.bin revisions
107 adding lfs.bin revisions
107 added 1 changesets with 1 changes to 1 files
108 added 1 changesets with 1 changes to 1 files
108 bundle2-input-part: total payload size 648
109 bundle2-input-part: total payload size 648
109 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
110 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
110 bundle2-input-part: "phase-heads" supported
111 bundle2-input-part: "phase-heads" supported
111 bundle2-input-part: total payload size 24
112 bundle2-input-part: total payload size 24
112 bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
113 bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
113 bundle2-input-part: total payload size 39
114 bundle2-input-part: total payload size 39
114 bundle2-input-bundle: 3 parts total
115 bundle2-input-bundle: 3 parts total
115 checking for updated bookmarks
116 checking for updated bookmarks
116 updating the branch cache
117 updating the branch cache
117 new changesets 525251863cad
118 new changesets 525251863cad
118 updating to branch default
119 updating to branch default
119 resolving manifests
120 resolving manifests
120 branchmerge: False, force: False, partial: False
121 branchmerge: False, force: False, partial: False
121 ancestor: 000000000000, local: 000000000000+, remote: 525251863cad
122 ancestor: 000000000000, local: 000000000000+, remote: 525251863cad
122 lfs: assuming remote store: http://localhost:$HGPORT/subdir/mount/point/.git/info/lfs
123 lfs: assuming remote store: http://localhost:$HGPORT/subdir/mount/point/.git/info/lfs
123 Status: 200
124 Status: 200
124 Content-Length: 371
125 Content-Length: 371
125 Content-Type: application/vnd.git-lfs+json
126 Content-Type: application/vnd.git-lfs+json
126 Date: $HTTP_DATE$
127 Date: $HTTP_DATE$
127 Server: testing stub value
128 Server: testing stub value
128 {
129 {
129 "objects": [
130 "objects": [
130 {
131 {
131 "actions": {
132 "actions": {
132 "download": {
133 "download": {
133 "expires_at": "$ISO_8601_DATE_TIME$"
134 "expires_at": "$ISO_8601_DATE_TIME$"
134 "header": {
135 "header": {
135 "Accept": "application/vnd.git-lfs"
136 "Accept": "application/vnd.git-lfs"
136 }
137 }
137 "href": "http://localhost:$HGPORT/subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e"
138 "href": "http://localhost:$HGPORT/subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e"
138 }
139 }
139 }
140 }
140 "oid": "f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e"
141 "oid": "f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e"
141 "size": 20
142 "size": 20
142 }
143 }
143 ]
144 ]
144 "transfer": "basic"
145 "transfer": "basic"
145 }
146 }
146 lfs: downloading f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e (20 bytes)
147 lfs: downloading f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e (20 bytes)
147 Status: 200
148 Status: 200
148 Content-Length: 20
149 Content-Length: 20
149 Content-Type: application/octet-stream
150 Content-Type: application/octet-stream
150 Date: $HTTP_DATE$
151 Date: $HTTP_DATE$
151 Server: testing stub value
152 Server: testing stub value
152 lfs: adding f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e to the usercache
153 lfs: adding f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e to the usercache
153 lfs: processed: f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e
154 lfs: processed: f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e
154 lfs: downloaded 1 files (20 bytes)
155 lfs: downloaded 1 files (20 bytes)
155 lfs.bin: remote created -> g
156 lfs.bin: remote created -> g
156 getting lfs.bin
157 getting lfs.bin
157 lfs: found f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e in the local lfs store
158 lfs: found f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e in the local lfs store
158 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
159 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
159 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
160 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
160
161
161 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
162 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
162
163
163 $ cat $TESTTMP/access.log $TESTTMP/errors.log
164 $ cat $TESTTMP/access.log $TESTTMP/errors.log
164 $LOCALIP - - [$LOGDATE$] "POST /missing/objects/batch HTTP/1.1" 404 - (glob)
165 $LOCALIP - - [$LOGDATE$] "POST /missing/objects/batch HTTP/1.1" 404 - (glob)
165 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=capabilities HTTP/1.1" 200 - (glob)
166 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=capabilities HTTP/1.1" 200 - (glob)
166 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
167 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
167 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
168 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
168 $LOCALIP - - [$LOGDATE$] "POST /subdir/mount/point/.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
169 $LOCALIP - - [$LOGDATE$] "POST /subdir/mount/point/.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
169 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e HTTP/1.1" 200 - (glob)
170 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e HTTP/1.1" 200 - (glob)
170
171
171 Blobs that already exist in the usercache are linked into the repo store, even
172 Blobs that already exist in the usercache are linked into the repo store, even
172 though the client doesn't send the blob.
173 though the client doesn't send the blob.
173
174
174 $ hg init server2
175 $ hg init server2
175 $ hg --config "lfs.usercache=$TESTTMP/servercache" -R server2 serve -d \
176 $ hg --config "lfs.usercache=$TESTTMP/servercache" -R server2 serve -d \
176 > -p $HGPORT --pid-file=hg.pid \
177 > -p $HGPORT --pid-file=hg.pid \
177 > -A $TESTTMP/access.log -E $TESTTMP/errors.log
178 > -A $TESTTMP/access.log -E $TESTTMP/errors.log
178 $ cat hg.pid >> $DAEMON_PIDS
179 $ cat hg.pid >> $DAEMON_PIDS
179
180
180 $ hg --config "lfs.usercache=$TESTTMP/servercache" -R cloned2 --debug \
181 $ hg --config "lfs.usercache=$TESTTMP/servercache" -R cloned2 --debug \
181 > push http://localhost:$HGPORT | grep '^[{} ]'
182 > push http://localhost:$HGPORT | grep '^[{} ]'
182 {
183 {
183 "objects": [
184 "objects": [
184 {
185 {
185 "oid": "f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e"
186 "oid": "f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e"
186 "size": 20
187 "size": 20
187 }
188 }
188 ]
189 ]
189 "transfer": "basic"
190 "transfer": "basic"
190 }
191 }
191 $ find server2/.hg/store/lfs/objects | sort
192 $ find server2/.hg/store/lfs/objects | sort
192 server2/.hg/store/lfs/objects
193 server2/.hg/store/lfs/objects
193 server2/.hg/store/lfs/objects/f0
194 server2/.hg/store/lfs/objects/f0
194 server2/.hg/store/lfs/objects/f0/3217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e
195 server2/.hg/store/lfs/objects/f0/3217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e
195 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
196 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
196 $ cat $TESTTMP/errors.log
197 $ cat $TESTTMP/errors.log
197
198
198 $ cat >> $TESTTMP/lfsstoreerror.py <<EOF
199 $ cat >> $TESTTMP/lfsstoreerror.py <<EOF
199 > import errno
200 > import errno
200 > from hgext.lfs import blobstore
201 > from hgext.lfs import blobstore
201 >
202 >
202 > _numverifies = 0
203 > _numverifies = 0
203 > _readerr = True
204 > _readerr = True
204 >
205 >
205 > def reposetup(ui, repo):
206 > def reposetup(ui, repo):
206 > # Nothing to do with a remote repo
207 > # Nothing to do with a remote repo
207 > if not repo.local():
208 > if not repo.local():
208 > return
209 > return
209 >
210 >
210 > store = repo.svfs.lfslocalblobstore
211 > store = repo.svfs.lfslocalblobstore
211 > class badstore(store.__class__):
212 > class badstore(store.__class__):
212 > def download(self, oid, src):
213 > def download(self, oid, src):
213 > '''Called in the server to handle reading from the client in a
214 > '''Called in the server to handle reading from the client in a
214 > PUT request.'''
215 > PUT request.'''
215 > origread = src.read
216 > origread = src.read
216 > def _badread(nbytes):
217 > def _badread(nbytes):
217 > # Simulate bad data/checksum failure from the client
218 > # Simulate bad data/checksum failure from the client
218 > return b'0' * len(origread(nbytes))
219 > return b'0' * len(origread(nbytes))
219 > src.read = _badread
220 > src.read = _badread
220 > super(badstore, self).download(oid, src)
221 > super(badstore, self).download(oid, src)
221 >
222 >
222 > def _read(self, vfs, oid, verify):
223 > def _read(self, vfs, oid, verify):
223 > '''Called in the server to read data for a GET request, and then
224 > '''Called in the server to read data for a GET request, and then
224 > calls self._verify() on it before returning.'''
225 > calls self._verify() on it before returning.'''
225 > global _readerr
226 > global _readerr
226 > # One time simulation of a read error
227 > # One time simulation of a read error
227 > if _readerr:
228 > if _readerr:
228 > _readerr = False
229 > _readerr = False
229 > raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
230 > raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
230 > # Simulate corrupt content on client download
231 > # Simulate corrupt content on client download
231 > blobstore._verify(oid, b'dummy content')
232 > blobstore._verify(oid, b'dummy content')
232 >
233 >
233 > def verify(self, oid):
234 > def verify(self, oid):
234 > '''Called in the server to populate the Batch API response,
235 > '''Called in the server to populate the Batch API response,
235 > letting the client re-upload if the file is corrupt.'''
236 > letting the client re-upload if the file is corrupt.'''
236 > # Fail verify in Batch API for one clone command and one push
237 > # Fail verify in Batch API for one clone command and one push
237 > # command with an IOError. Then let it through to access other
238 > # command with an IOError. Then let it through to access other
238 > # functions. Checksum failure is tested elsewhere.
239 > # functions. Checksum failure is tested elsewhere.
239 > global _numverifies
240 > global _numverifies
240 > _numverifies += 1
241 > _numverifies += 1
241 > if _numverifies <= 2:
242 > if _numverifies <= 2:
242 > raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
243 > raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
243 > return super(badstore, self).verify(oid)
244 > return super(badstore, self).verify(oid)
244 >
245 >
245 > store.__class__ = badstore
246 > store.__class__ = badstore
246 > EOF
247 > EOF
247
248
248 $ rm -rf `hg config lfs.usercache`
249 $ rm -rf `hg config lfs.usercache`
249 $ rm -f $TESTTMP/access.log $TESTTMP/errors.log
250 $ rm -f $TESTTMP/access.log $TESTTMP/errors.log
250 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
251 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
251 > --config extensions.lfsstoreerror=$TESTTMP/lfsstoreerror.py \
252 > --config extensions.lfsstoreerror=$TESTTMP/lfsstoreerror.py \
252 > -R server serve -d \
253 > -R server serve -d \
253 > -p $HGPORT1 --pid-file=hg.pid -A $TESTTMP/access.log -E $TESTTMP/errors.log
254 > -p $HGPORT1 --pid-file=hg.pid -A $TESTTMP/access.log -E $TESTTMP/errors.log
254 $ cat hg.pid >> $DAEMON_PIDS
255 $ cat hg.pid >> $DAEMON_PIDS
255
256
256 Test an I/O error in localstore.verify() (Batch API) with GET
257 Test an I/O error in localstore.verify() (Batch API) with GET
257
258
258 $ hg clone http://localhost:$HGPORT1 httpclone2
259 $ hg clone http://localhost:$HGPORT1 httpclone2
259 (remote is using large file support (lfs); lfs will be enabled for this repository)
260 (remote is using large file support (lfs); lfs will be enabled for this repository)
260 requesting all changes
261 requesting all changes
261 adding changesets
262 adding changesets
262 adding manifests
263 adding manifests
263 adding file changes
264 adding file changes
264 added 1 changesets with 1 changes to 1 files
265 added 1 changesets with 1 changes to 1 files
265 new changesets 525251863cad
266 new changesets 525251863cad
266 updating to branch default
267 updating to branch default
267 abort: LFS server error for "lfs.bin": Internal server error!
268 abort: LFS server error for "lfs.bin": Internal server error!
268 [255]
269 [255]
269
270
270 Test an I/O error in localstore.verify() (Batch API) with PUT
271 Test an I/O error in localstore.verify() (Batch API) with PUT
271
272
272 $ echo foo > client/lfs.bin
273 $ echo foo > client/lfs.bin
273 $ hg -R client ci -m 'mod lfs'
274 $ hg -R client ci -m 'mod lfs'
274 $ hg -R client push http://localhost:$HGPORT1
275 $ hg -R client push http://localhost:$HGPORT1
275 pushing to http://localhost:$HGPORT1/
276 pushing to http://localhost:$HGPORT1/
276 searching for changes
277 searching for changes
277 abort: LFS server error for "unknown": Internal server error!
278 abort: LFS server error for "unknown": Internal server error!
278 [255]
279 [255]
279 TODO: figure out how to associate the file name in the error above
280 TODO: figure out how to associate the file name in the error above
280
281
281 Test a bad checksum sent by the client in the transfer API
282 Test a bad checksum sent by the client in the transfer API
282
283
283 $ hg -R client push http://localhost:$HGPORT1
284 $ hg -R client push http://localhost:$HGPORT1
284 pushing to http://localhost:$HGPORT1/
285 pushing to http://localhost:$HGPORT1/
285 searching for changes
286 searching for changes
286 abort: LFS HTTP error: HTTP Error 422: corrupt blob (oid=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c, action=upload)!
287 abort: LFS HTTP error: HTTP Error 422: corrupt blob (oid=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c, action=upload)!
287 [255]
288 [255]
288
289
289 $ echo 'test lfs file' > server/lfs3.bin
290 $ echo 'test lfs file' > server/lfs3.bin
290 $ hg --config experimental.lfs.disableusercache=True \
291 $ hg --config experimental.lfs.disableusercache=True \
291 > -R server ci -Aqm 'another lfs file'
292 > -R server ci -Aqm 'another lfs file'
292 $ hg -R client pull -q http://localhost:$HGPORT1
293 $ hg -R client pull -q http://localhost:$HGPORT1
293
294
294 Test an I/O error during the processing of the GET request
295 Test an I/O error during the processing of the GET request
295
296
296 $ hg --config lfs.url=http://localhost:$HGPORT1/.git/info/lfs \
297 $ hg --config lfs.url=http://localhost:$HGPORT1/.git/info/lfs \
297 > -R client update -r tip
298 > -R client update -r tip
298 abort: LFS HTTP error: HTTP Error 500: Internal Server Error (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)!
299 abort: LFS HTTP error: HTTP Error 500: Internal Server Error (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)!
299 [255]
300 [255]
300
301
301 Test a checksum failure during the processing of the GET request
302 Test a checksum failure during the processing of the GET request
302
303
303 $ hg --config lfs.url=http://localhost:$HGPORT1/.git/info/lfs \
304 $ hg --config lfs.url=http://localhost:$HGPORT1/.git/info/lfs \
304 > -R client update -r tip
305 > -R client update -r tip
305 abort: LFS HTTP error: HTTP Error 422: corrupt blob (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)!
306 abort: LFS HTTP error: HTTP Error 422: corrupt blob (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)!
306 [255]
307 [255]
307
308
308 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
309 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
309
310
310 $ cat $TESTTMP/access.log
311 $ cat $TESTTMP/access.log
311 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
312 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
312 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
313 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
313 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
314 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
314 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
315 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
315 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
316 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
316 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
317 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
317 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
318 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
318 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
319 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
319 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
320 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
320 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
321 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
321 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
322 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
322 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
323 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
323 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
324 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
324 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
325 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
325 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
326 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
326 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
327 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
327 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
328 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
328 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
329 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
329 $LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c HTTP/1.1" 422 - (glob)
330 $LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c HTTP/1.1" 422 - (glob)
330 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
331 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
331 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
332 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
332 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
333 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
333 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
334 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
334 $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 500 - (glob)
335 $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 500 - (glob)
335 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
336 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
336 $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 422 - (glob)
337 $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 422 - (glob)
337
338
338 $ grep -v ' File "' $TESTTMP/errors.log
339 $ grep -v ' File "' $TESTTMP/errors.log
339 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob)
340 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob)
340 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
341 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
341 $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob)
342 $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob)
342 $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob)
343 $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob)
343 $LOCALIP - - [$ERRDATE$] HG error: *Error: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob)
344 $LOCALIP - - [$ERRDATE$] HG error: *Error: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob)
344 $LOCALIP - - [$ERRDATE$] HG error: (glob)
345 $LOCALIP - - [$ERRDATE$] HG error: (glob)
345 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob)
346 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob)
346 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
347 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
347 $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob)
348 $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob)
348 $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob)
349 $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob)
349 $LOCALIP - - [$ERRDATE$] HG error: *Error: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob)
350 $LOCALIP - - [$ERRDATE$] HG error: *Error: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob)
350 $LOCALIP - - [$ERRDATE$] HG error: (glob)
351 $LOCALIP - - [$ERRDATE$] HG error: (glob)
351 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c': (glob)
352 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c': (glob)
352 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
353 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
353 $LOCALIP - - [$ERRDATE$] HG error: localstore.download(oid, req.bodyfh) (glob)
354 $LOCALIP - - [$ERRDATE$] HG error: localstore.download(oid, req.bodyfh) (glob)
354 $LOCALIP - - [$ERRDATE$] HG error: super(badstore, self).download(oid, src) (glob)
355 $LOCALIP - - [$ERRDATE$] HG error: super(badstore, self).download(oid, src) (glob)
355 $LOCALIP - - [$ERRDATE$] HG error: % oid) (glob)
356 $LOCALIP - - [$ERRDATE$] HG error: % oid) (glob)
356 $LOCALIP - - [$ERRDATE$] HG error: LfsCorruptionError: corrupt remote lfs object: b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c (glob)
357 $LOCALIP - - [$ERRDATE$] HG error: LfsCorruptionError: corrupt remote lfs object: b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c (glob)
357 $LOCALIP - - [$ERRDATE$] HG error: (glob)
358 $LOCALIP - - [$ERRDATE$] HG error: (glob)
358 $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
359 $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
359 Traceback (most recent call last):
360 Traceback (most recent call last):
360 self.do_write()
361 self.do_write()
361 self.do_hgweb()
362 self.do_hgweb()
362 for chunk in self.server.application(env, self._start_response):
363 for chunk in self.server.application(env, self._start_response):
363 for r in self._runwsgi(req, res, repo):
364 for r in self._runwsgi(req, res, repo):
364 rctx, req, res, self.check_perm)
365 rctx, req, res, self.check_perm)
365 return func(*(args + a), **kw) (no-py3 !)
366 return func(*(args + a), **kw) (no-py3 !)
366 lambda perm:
367 lambda perm:
367 res.setbodybytes(localstore.read(oid))
368 res.setbodybytes(localstore.read(oid))
368 blob = self._read(self.vfs, oid, verify)
369 blob = self._read(self.vfs, oid, verify)
369 raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
370 raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
370 *Error: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error (glob)
371 *Error: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error (glob)
371
372
372 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
373 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
373 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
374 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
374 $LOCALIP - - [$ERRDATE$] HG error: res.setbodybytes(localstore.read(oid)) (glob)
375 $LOCALIP - - [$ERRDATE$] HG error: res.setbodybytes(localstore.read(oid)) (glob)
375 $LOCALIP - - [$ERRDATE$] HG error: blob = self._read(self.vfs, oid, verify) (glob)
376 $LOCALIP - - [$ERRDATE$] HG error: blob = self._read(self.vfs, oid, verify) (glob)
376 $LOCALIP - - [$ERRDATE$] HG error: blobstore._verify(oid, b'dummy content') (glob)
377 $LOCALIP - - [$ERRDATE$] HG error: blobstore._verify(oid, b'dummy content') (glob)
377 $LOCALIP - - [$ERRDATE$] HG error: hint=_(b'run hg verify')) (glob)
378 $LOCALIP - - [$ERRDATE$] HG error: hint=_(b'run hg verify')) (glob)
378 $LOCALIP - - [$ERRDATE$] HG error: LfsCorruptionError: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d (glob)
379 $LOCALIP - - [$ERRDATE$] HG error: LfsCorruptionError: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d (glob)
379 $LOCALIP - - [$ERRDATE$] HG error: (glob)
380 $LOCALIP - - [$ERRDATE$] HG error: (glob)
380
381
381 Basic Authorization headers are returned by the Batch API, and sent back with
382 Basic Authorization headers are returned by the Batch API, and sent back with
382 the GET/PUT request.
383 the GET/PUT request.
383
384
384 $ rm -f $TESTTMP/access.log $TESTTMP/errors.log
385 $ rm -f $TESTTMP/access.log $TESTTMP/errors.log
385
386
386 $ cat >> $HGRCPATH << EOF
387 $ cat >> $HGRCPATH << EOF
387 > [experimental]
388 > [experimental]
388 > lfs.disableusercache = True
389 > lfs.disableusercache = True
389 > [auth]
390 > [auth]
390 > l.schemes=http
391 > l.schemes=http
391 > l.prefix=lo
392 > l.prefix=lo
392 > l.username=user
393 > l.username=user
393 > l.password=pass
394 > l.password=pass
394 > EOF
395 > EOF
395
396
396 $ hg --config extensions.x=$TESTDIR/httpserverauth.py \
397 $ hg --config extensions.x=$TESTDIR/httpserverauth.py \
397 > -R server serve -d -p $HGPORT1 --pid-file=hg.pid \
398 > -R server serve -d -p $HGPORT1 --pid-file=hg.pid \
398 > -A $TESTTMP/access.log -E $TESTTMP/errors.log
399 > -A $TESTTMP/access.log -E $TESTTMP/errors.log
399 $ mv hg.pid $DAEMON_PIDS
400 $ mv hg.pid $DAEMON_PIDS
400
401
401 $ hg clone --debug http://localhost:$HGPORT1 auth_clone | egrep '^[{}]| '
402 $ hg clone --debug http://localhost:$HGPORT1 auth_clone | egrep '^[{}]| '
402 {
403 {
403 "objects": [
404 "objects": [
404 {
405 {
405 "actions": {
406 "actions": {
406 "download": {
407 "download": {
407 "expires_at": "$ISO_8601_DATE_TIME$"
408 "expires_at": "$ISO_8601_DATE_TIME$"
408 "header": {
409 "header": {
409 "Accept": "application/vnd.git-lfs"
410 "Accept": "application/vnd.git-lfs"
410 "Authorization": "Basic dXNlcjpwYXNz"
411 "Authorization": "Basic dXNlcjpwYXNz"
411 }
412 }
412 "href": "http://localhost:$HGPORT1/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d"
413 "href": "http://localhost:$HGPORT1/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d"
413 }
414 }
414 }
415 }
415 "oid": "276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d"
416 "oid": "276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d"
416 "size": 14
417 "size": 14
417 }
418 }
418 ]
419 ]
419 "transfer": "basic"
420 "transfer": "basic"
420 }
421 }
421
422
422 $ echo 'another blob' > auth_clone/lfs.blob
423 $ echo 'another blob' > auth_clone/lfs.blob
423 $ hg -R auth_clone ci -Aqm 'add blob'
424 $ hg -R auth_clone ci -Aqm 'add blob'
424
425
425 $ cat > use_digests.py << EOF
426 $ cat > use_digests.py << EOF
426 > from mercurial import (
427 > from mercurial import (
427 > exthelper,
428 > exthelper,
428 > url,
429 > url,
429 > )
430 > )
430 >
431 >
431 > eh = exthelper.exthelper()
432 > eh = exthelper.exthelper()
432 > uisetup = eh.finaluisetup
433 > uisetup = eh.finaluisetup
433 >
434 >
434 > @eh.wrapfunction(url, 'opener')
435 > @eh.wrapfunction(url, 'opener')
435 > def urlopener(orig, *args, **kwargs):
436 > def urlopener(orig, *args, **kwargs):
436 > opener = orig(*args, **kwargs)
437 > opener = orig(*args, **kwargs)
437 > opener.addheaders.append((r'X-HgTest-AuthType', r'Digest'))
438 > opener.addheaders.append((r'X-HgTest-AuthType', r'Digest'))
438 > return opener
439 > return opener
439 > EOF
440 > EOF
440
441
441 Test that Digest Auth fails gracefully before testing the successful Basic Auth
442 Test that Digest Auth fails gracefully before testing the successful Basic Auth
442
443
443 $ hg -R auth_clone push --config extensions.x=use_digests.py
444 $ hg -R auth_clone push --config extensions.x=use_digests.py
444 pushing to http://localhost:$HGPORT1/
445 pushing to http://localhost:$HGPORT1/
445 searching for changes
446 searching for changes
446 abort: LFS HTTP error: HTTP Error 401: the server must support Basic Authentication!
447 abort: LFS HTTP error: HTTP Error 401: the server must support Basic Authentication!
447 (api=http://localhost:$HGPORT1/.git/info/lfs/objects/batch, action=upload)
448 (api=http://localhost:$HGPORT1/.git/info/lfs/objects/batch, action=upload)
448 [255]
449 [255]
449
450
450 $ hg -R auth_clone --debug push | egrep '^[{}]| '
451 $ hg -R auth_clone --debug push | egrep '^[{}]| '
451 {
452 {
452 "objects": [
453 "objects": [
453 {
454 {
454 "actions": {
455 "actions": {
455 "upload": {
456 "upload": {
456 "expires_at": "$ISO_8601_DATE_TIME$"
457 "expires_at": "$ISO_8601_DATE_TIME$"
457 "header": {
458 "header": {
458 "Accept": "application/vnd.git-lfs"
459 "Accept": "application/vnd.git-lfs"
459 "Authorization": "Basic dXNlcjpwYXNz"
460 "Authorization": "Basic dXNlcjpwYXNz"
460 }
461 }
461 "href": "http://localhost:$HGPORT1/.hg/lfs/objects/df14287d8d75f076a6459e7a3703ca583ca9fb3f4918caed10c77ac8622d49b3"
462 "href": "http://localhost:$HGPORT1/.hg/lfs/objects/df14287d8d75f076a6459e7a3703ca583ca9fb3f4918caed10c77ac8622d49b3"
462 }
463 }
463 }
464 }
464 "oid": "df14287d8d75f076a6459e7a3703ca583ca9fb3f4918caed10c77ac8622d49b3"
465 "oid": "df14287d8d75f076a6459e7a3703ca583ca9fb3f4918caed10c77ac8622d49b3"
465 "size": 13
466 "size": 13
466 }
467 }
467 ]
468 ]
468 "transfer": "basic"
469 "transfer": "basic"
469 }
470 }
470
471
471 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
472 $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
472
473
473 $ cat $TESTTMP/access.log $TESTTMP/errors.log
474 $ cat $TESTTMP/access.log $TESTTMP/errors.log
474 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob)
475 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob)
475 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
476 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
476 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
477 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
477 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
478 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
478 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob)
479 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob)
479 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
480 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
480 $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 200 - (glob)
481 $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 200 - (glob)
481 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - x-hgtest-authtype:Digest (glob)
482 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - x-hgtest-authtype:Digest (glob)
482 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - x-hgtest-authtype:Digest (glob)
483 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - x-hgtest-authtype:Digest (glob)
483 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 401 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
484 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 401 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
484 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
485 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
485 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
486 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
486 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
487 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
487 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
488 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
488 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
489 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
489 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
490 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
490 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
491 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
491 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
492 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
492 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
493 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob)
493 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - x-hgtest-authtype:Digest (glob)
494 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - x-hgtest-authtype:Digest (glob)
494 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob)
495 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob)
495 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
496 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
496 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
497 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
497 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
498 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
498 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
499 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
499 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
500 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
500 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
501 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
501 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob)
502 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob)
502 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
503 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
503 $LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/df14287d8d75f076a6459e7a3703ca583ca9fb3f4918caed10c77ac8622d49b3 HTTP/1.1" 201 - (glob)
504 $LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/df14287d8d75f076a6459e7a3703ca583ca9fb3f4918caed10c77ac8622d49b3 HTTP/1.1" 201 - (glob)
504 $LOCALIP - - [$LOGDATE$] "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
505 $LOCALIP - - [$LOGDATE$] "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
505 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
506 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
General Comments 0
You need to be logged in to leave comments. Login now