# subrepo.py - sub-repository handling for Mercurial
# Changeset r14766:4f56b753 (branch: stable) - Matt Mackall
# "subrepos: be smarter about what's an absolute path (issue2808)"
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import errno, os, re, xml.dom.minidom, shutil, posixpath
8 import errno, os, re, xml.dom.minidom, shutil, posixpath
9 import stat, subprocess, tarfile
9 import stat, subprocess, tarfile
10 from i18n import _
10 from i18n import _
11 import config, scmutil, util, node, error, cmdutil, bookmarks
11 import config, scmutil, util, node, error, cmdutil, bookmarks
12 hg = None
12 hg = None
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14
14
15 nullstate = ('', '', 'empty')
15 nullstate = ('', '', 'empty')
16
16
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()

    def read(f, sections=None, remap=None):
        # Read a subrepo spec file from the context; config include
        # directives recurse back through this function via p.parse.
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file %s not found\n") % f)
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise util.Abort(_("subrepo spec file %s not found") % f)

    if '.hgsub' in ctx:
        read('.hgsub')

    # [subpaths] from the ui config participates in the remapping below
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # map subrepo path -> pinned revision, from .hgsubstate (absent
    # entries default to '' below)
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for l in ctx['.hgsubstate'].data().splitlines():
                revision, path = l.split(" ", 1)
                rev[path] = revision
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        # optional "[kind]source" prefix selects the subrepo type
        if src.startswith('['):
            if ']' not in src:
                raise util.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]

        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = repl.encode('string-escape')
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error as e:
                raise util.Abort(_("bad subrepository pattern in %s: %s")
                                 % (p.source('subpaths', pattern), e))

        state[path] = (src.strip(), rev.get(path, ''), kind)

    return state
79
79
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states

    Each line is "<revision> <path>", sorted by subrepo path.
    """
    lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
    repo.wwrite('.hgsubstate', ''.join(lines), '')
84
84
def submerge(repo, wctx, mctx, actx, overwrite):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate
    s2 = mctx.substate
    sa = actx.substate
    sm = {}

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))

    # first pass: subrepos present locally
    for s, l in s1.items():
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                if repo.ui.promptchoice(
                    _(' subrepository sources for %s differ\n'
                      'use (l)ocal source (%s) or (r)emote source (%s)?')
                      % (s, l[0], r[0]),
                    (_('&Local'), _('&Remote')), 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                debug(s, "both sides changed, merge with", r)
                wctx.sub(s).merge(r)
                sm[s] = l
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            # local changed but remote removed: ask the user
            if repo.ui.promptchoice(
                _(' local changed subrepository %s which remote removed\n'
                  'use (c)hanged version or (d)elete?') % s,
                (_('&Changed'), _('&Delete')), 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # second pass: subrepos only present remotely
    for s, r in sorted(s2.items()):
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # remote changed but local removed: ask the user
            if repo.ui.promptchoice(
                _(' remote changed subrepository %s which local removed\n'
                  'use (c)hanged version or (d)elete?') % s,
                (_('&Changed'), _('&Delete')), 0) == 0:
                debug(s, "prompt recreate", r)
                wctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
169
169
def _updateprompt(ui, sub, dirty, local, remote):
    """prompt the user to pick the local or remote side for a subrepo

    Returns the promptchoice index: 0 for local, 1 for remote.
    """
    if dirty:
        msg = (_(' subrepository sources for %s differ\n'
                 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
               % (subrelpath(sub), local, remote))
    else:
        msg = (_(' subrepository sources for %s differ (in checked out version)\n'
                 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
               % (subrelpath(sub), local, remote))
    return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
180
180
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # walk the _subparent chain up to the outermost repository
    parent = repo
    while hasattr(parent, '_subparent'):
        parent = parent._subparent
    # strip the outermost root plus its path separator
    return repo.root[len(parent.root)+1:]
187
187
def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    # some subrepo types cache the relative path directly
    if hasattr(sub, '_relpath'):
        return sub._relpath
    # non-hg subrepos have no _repo; their _path is already relative
    if not hasattr(sub, '_repo'):
        return sub._path
    return reporelpath(sub._repo)
195
195
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if hasattr(repo, '_subparent'):
        source = util.url(repo._subsource)
        # issue2808: test absoluteness BEFORE normalizing; normalizing
        # first mangled Windows drive-letter paths like C:\foo
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        # relative source: resolve against the parent repo's source
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(parent)
            parent.path = posixpath.join(parent.path, source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else: # recursion reached top repo
        if hasattr(repo, '_subtoppath'):
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
    if abort:
        raise util.Abort(_("default path for subrepository %s not found") %
                         reporelpath(repo))
220
220
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)
230
230
def subrepo(ctx, path):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    import hg as h
    hg = h

    # refuse paths escaping the repository (auditor raises on abuse)
    scmutil.pathauditor(ctx._repo.root)(path)
    state = ctx.substate.get(path, nullstate)
    if state[2] not in types:
        raise util.Abort(_('unknown subrepo type %s') % state[2])
    return types[state[2]](ctx, path, state[:2])
246
246
247 # subrepo classes need to implement the following abstract class:
247 # subrepo classes need to implement the following abstract class:
248
248
class abstractsubrepo(object):
    """abstract base for subrepo backends (hg, svn, git, ...)

    Subclasses must implement the NotImplementedError methods; the
    remainder provide no-op defaults.
    """

    def dirty(self, ignoreupdate=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate.
        """
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, force):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, dryrun, prefix):
        # default: nothing to add
        return []

    def status(self, rev2, **opts):
        # default: seven empty status lists
        return [], [], [], [], [], [], []

    def diff(self, diffopts, node2, match, prefix, **opts):
        pass

    def outgoing(self, ui, dest, opts):
        # 1 means "no outgoing changes" in command return-code terms
        return 1

    def incoming(self, ui, source, opts):
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name):
        """return file data"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return ''

    def archive(self, ui, archiver, prefix):
        """add this subrepo's files to an archive, with progress output"""
        files = self.files()
        total = len(files)
        relpath = subrelpath(self)
        ui.progress(_('archiving (%s)') % relpath, 0,
                    unit=_('files'), total=total)
        for i, name in enumerate(files):
            flags = self.fileflags(name)
            # executable files get 0755, everything else 0644
            mode = 'x' in flags and 0o755 or 0o644
            symlink = 'l' in flags
            archiver.addfile(os.path.join(prefix, self._path, name),
                             mode, symlink, self.filedata(name))
            ui.progress(_('archiving (%s)') % relpath, i + 1,
                        unit=_('files'), total=total)
        ui.progress(_('archiving (%s)') % relpath, None)
335
335
336
336
337 class hgsubrepo(abstractsubrepo):
337 class hgsubrepo(abstractsubrepo):
338 def __init__(self, ctx, path, state):
338 def __init__(self, ctx, path, state):
339 self._path = path
339 self._path = path
340 self._state = state
340 self._state = state
341 r = ctx._repo
341 r = ctx._repo
342 root = r.wjoin(path)
342 root = r.wjoin(path)
343 create = False
343 create = False
344 if not os.path.exists(os.path.join(root, '.hg')):
344 if not os.path.exists(os.path.join(root, '.hg')):
345 create = True
345 create = True
346 util.makedirs(root)
346 util.makedirs(root)
347 self._repo = hg.repository(r.ui, root, create=create)
347 self._repo = hg.repository(r.ui, root, create=create)
348 self._initrepo(r, state[0], create)
348 self._initrepo(r, state[0], create)
349
349
350 def _initrepo(self, parentrepo, source, create):
350 def _initrepo(self, parentrepo, source, create):
351 self._repo._subparent = parentrepo
351 self._repo._subparent = parentrepo
352 self._repo._subsource = source
352 self._repo._subsource = source
353
353
354 if create:
354 if create:
355 fp = self._repo.opener("hgrc", "w", text=True)
355 fp = self._repo.opener("hgrc", "w", text=True)
356 fp.write('[paths]\n')
356 fp.write('[paths]\n')
357
357
358 def addpathconfig(key, value):
358 def addpathconfig(key, value):
359 if value:
359 if value:
360 fp.write('%s = %s\n' % (key, value))
360 fp.write('%s = %s\n' % (key, value))
361 self._repo.ui.setconfig('paths', key, value)
361 self._repo.ui.setconfig('paths', key, value)
362
362
363 defpath = _abssource(self._repo, abort=False)
363 defpath = _abssource(self._repo, abort=False)
364 defpushpath = _abssource(self._repo, True, abort=False)
364 defpushpath = _abssource(self._repo, True, abort=False)
365 addpathconfig('default', defpath)
365 addpathconfig('default', defpath)
366 if defpath != defpushpath:
366 if defpath != defpushpath:
367 addpathconfig('default-push', defpushpath)
367 addpathconfig('default-push', defpushpath)
368 fp.close()
368 fp.close()
369
369
370 def add(self, ui, match, dryrun, prefix):
370 def add(self, ui, match, dryrun, prefix):
371 return cmdutil.add(ui, self._repo, match, dryrun, True,
371 return cmdutil.add(ui, self._repo, match, dryrun, True,
372 os.path.join(prefix, self._path))
372 os.path.join(prefix, self._path))
373
373
374 def status(self, rev2, **opts):
374 def status(self, rev2, **opts):
375 try:
375 try:
376 rev1 = self._state[1]
376 rev1 = self._state[1]
377 ctx1 = self._repo[rev1]
377 ctx1 = self._repo[rev1]
378 ctx2 = self._repo[rev2]
378 ctx2 = self._repo[rev2]
379 return self._repo.status(ctx1, ctx2, **opts)
379 return self._repo.status(ctx1, ctx2, **opts)
380 except error.RepoLookupError, inst:
380 except error.RepoLookupError, inst:
381 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
381 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
382 % (inst, subrelpath(self)))
382 % (inst, subrelpath(self)))
383 return [], [], [], [], [], [], []
383 return [], [], [], [], [], [], []
384
384
385 def diff(self, diffopts, node2, match, prefix, **opts):
385 def diff(self, diffopts, node2, match, prefix, **opts):
386 try:
386 try:
387 node1 = node.bin(self._state[1])
387 node1 = node.bin(self._state[1])
388 # We currently expect node2 to come from substate and be
388 # We currently expect node2 to come from substate and be
389 # in hex format
389 # in hex format
390 if node2 is not None:
390 if node2 is not None:
391 node2 = node.bin(node2)
391 node2 = node.bin(node2)
392 cmdutil.diffordiffstat(self._repo.ui, self._repo, diffopts,
392 cmdutil.diffordiffstat(self._repo.ui, self._repo, diffopts,
393 node1, node2, match,
393 node1, node2, match,
394 prefix=os.path.join(prefix, self._path),
394 prefix=os.path.join(prefix, self._path),
395 listsubrepos=True, **opts)
395 listsubrepos=True, **opts)
396 except error.RepoLookupError, inst:
396 except error.RepoLookupError, inst:
397 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
397 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
398 % (inst, subrelpath(self)))
398 % (inst, subrelpath(self)))
399
399
400 def archive(self, ui, archiver, prefix):
400 def archive(self, ui, archiver, prefix):
401 abstractsubrepo.archive(self, ui, archiver, prefix)
401 abstractsubrepo.archive(self, ui, archiver, prefix)
402
402
403 rev = self._state[1]
403 rev = self._state[1]
404 ctx = self._repo[rev]
404 ctx = self._repo[rev]
405 for subpath in ctx.substate:
405 for subpath in ctx.substate:
406 s = subrepo(ctx, subpath)
406 s = subrepo(ctx, subpath)
407 s.archive(ui, archiver, os.path.join(prefix, self._path))
407 s.archive(ui, archiver, os.path.join(prefix, self._path))
408
408
409 def dirty(self, ignoreupdate=False):
409 def dirty(self, ignoreupdate=False):
410 r = self._state[1]
410 r = self._state[1]
411 if r == '' and not ignoreupdate: # no state recorded
411 if r == '' and not ignoreupdate: # no state recorded
412 return True
412 return True
413 w = self._repo[None]
413 w = self._repo[None]
414 if r != w.p1().hex() and not ignoreupdate:
414 if r != w.p1().hex() and not ignoreupdate:
415 # different version checked out
415 # different version checked out
416 return True
416 return True
417 return w.dirty() # working directory changed
417 return w.dirty() # working directory changed
418
418
419 def checknested(self, path):
419 def checknested(self, path):
420 return self._repo._checknested(self._repo.wjoin(path))
420 return self._repo._checknested(self._repo.wjoin(path))
421
421
422 def commit(self, text, user, date):
422 def commit(self, text, user, date):
423 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
423 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
424 n = self._repo.commit(text, user, date)
424 n = self._repo.commit(text, user, date)
425 if not n:
425 if not n:
426 return self._repo['.'].hex() # different version checked out
426 return self._repo['.'].hex() # different version checked out
427 return node.hex(n)
427 return node.hex(n)
428
428
429 def remove(self):
429 def remove(self):
430 # we can't fully delete the repository as it may contain
430 # we can't fully delete the repository as it may contain
431 # local-only history
431 # local-only history
432 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
432 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
433 hg.clean(self._repo, node.nullid, False)
433 hg.clean(self._repo, node.nullid, False)
434
434
435 def _get(self, state):
435 def _get(self, state):
436 source, revision, kind = state
436 source, revision, kind = state
437 if revision not in self._repo:
437 if revision not in self._repo:
438 self._repo._subsource = source
438 self._repo._subsource = source
439 srcurl = _abssource(self._repo)
439 srcurl = _abssource(self._repo)
440 other = hg.peer(self._repo.ui, {}, srcurl)
440 other = hg.peer(self._repo.ui, {}, srcurl)
441 if len(self._repo) == 0:
441 if len(self._repo) == 0:
442 self._repo.ui.status(_('cloning subrepo %s from %s\n')
442 self._repo.ui.status(_('cloning subrepo %s from %s\n')
443 % (subrelpath(self), srcurl))
443 % (subrelpath(self), srcurl))
444 parentrepo = self._repo._subparent
444 parentrepo = self._repo._subparent
445 shutil.rmtree(self._repo.root)
445 shutil.rmtree(self._repo.root)
446 other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
446 other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
447 self._repo.root, update=False)
447 self._repo.root, update=False)
448 self._initrepo(parentrepo, source, create=True)
448 self._initrepo(parentrepo, source, create=True)
449 else:
449 else:
450 self._repo.ui.status(_('pulling subrepo %s from %s\n')
450 self._repo.ui.status(_('pulling subrepo %s from %s\n')
451 % (subrelpath(self), srcurl))
451 % (subrelpath(self), srcurl))
452 self._repo.pull(other)
452 self._repo.pull(other)
453 bookmarks.updatefromremote(self._repo.ui, self._repo, other)
453 bookmarks.updatefromremote(self._repo.ui, self._repo, other)
454
454
455 def get(self, state, overwrite=False):
455 def get(self, state, overwrite=False):
456 self._get(state)
456 self._get(state)
457 source, revision, kind = state
457 source, revision, kind = state
458 self._repo.ui.debug("getting subrepo %s\n" % self._path)
458 self._repo.ui.debug("getting subrepo %s\n" % self._path)
459 hg.clean(self._repo, revision, False)
459 hg.clean(self._repo, revision, False)
460
460
461 def merge(self, state):
461 def merge(self, state):
462 self._get(state)
462 self._get(state)
463 cur = self._repo['.']
463 cur = self._repo['.']
464 dst = self._repo[state[1]]
464 dst = self._repo[state[1]]
465 anc = dst.ancestor(cur)
465 anc = dst.ancestor(cur)
466
466
467 def mergefunc():
467 def mergefunc():
468 if anc == cur:
468 if anc == cur:
469 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
469 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
470 hg.update(self._repo, state[1])
470 hg.update(self._repo, state[1])
471 elif anc == dst:
471 elif anc == dst:
472 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
472 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
473 else:
473 else:
474 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
474 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
475 hg.merge(self._repo, state[1], remind=False)
475 hg.merge(self._repo, state[1], remind=False)
476
476
477 wctx = self._repo[None]
477 wctx = self._repo[None]
478 if self.dirty():
478 if self.dirty():
479 if anc != dst:
479 if anc != dst:
480 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
480 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
481 mergefunc()
481 mergefunc()
482 else:
482 else:
483 mergefunc()
483 mergefunc()
484 else:
484 else:
485 mergefunc()
485 mergefunc()
486
486
def push(self, force):
    """Push this subrepo and, depth-first, its committed subrepos.

    Returns False as soon as any nested push fails, otherwise the
    result of pushing this repo to its default destination.
    """
    # push subrepos depth-first for coherent ordering
    ctx = self._repo['']
    substate = ctx.substate # only repos that are committed
    for sub in sorted(substate):
        if not ctx.sub(sub).push(force):
            return False

    dsturl = _abssource(self._repo, True)
    self._repo.ui.status(_('pushing subrepo %s to %s\n') %
                         (subrelpath(self), dsturl))
    other = hg.peer(self._repo.ui, {}, dsturl)
    return self._repo.push(other, force)
500
500
def outgoing(self, ui, dest, opts):
    """Show changesets not found in the default push destination."""
    dsturl = _abssource(self._repo, True)
    return hg.outgoing(ui, self._repo, dsturl, opts)
503
503
def incoming(self, ui, source, opts):
    """Show changesets available from the default pull source."""
    srcurl = _abssource(self._repo, False)
    return hg.incoming(ui, self._repo, srcurl, opts)
506
506
def files(self):
    """Return the manifest of the subrepo at its recorded revision."""
    return self._repo[self._state[1]].manifest()
511
511
def filedata(self, name):
    """Return the contents of *name* at the recorded revision."""
    return self._repo[self._state[1]][name].data()
515
515
def fileflags(self, name):
    """Return the manifest flags of *name* at the recorded revision."""
    return self._repo[self._state[1]].flags(name)
520
520
521
521
class svnsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state):
        self._path = path
        self._state = state
        self._ctx = ctx
        self._ui = ctx._repo.ui

    def _svncommand(self, commands, filename='', failok=False):
        """Run svn with *commands* and return (stdout, stderr).

        The subrepo path (joined with *filename*) is appended unless
        filename is None.  Unless *failok* is set, aborts when svn
        exits non-zero.
        """
        cmd = ['svn']
        extrakw = {}
        if not self._ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw['stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = os.path.join(self._ctx._repo.origroot, self._path, filename)
            cmd.append(path)
        env = dict(os.environ)
        # Avoid localized output, preserve current locale for everything else.
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True, env=env, **extrakw)
        stdout, stderr = p.communicate()
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise util.Abort(stderr or 'exited with code %d' % p.returncode)
            if stderr:
                self._ui.warn(stderr + '\n')
        return stdout, stderr

    @propertycache
    def _svnversion(self):
        """(major, minor) version of the svn client, cached."""
        output, err = self._svncommand(['--version'], filename=None)
        m = re.search(r'^svn,\s+version\s+(\d+)\.(\d+)', output)
        if not m:
            raise util.Abort(_('cannot retrieve svn tool version'))
        return (int(m.group(1)), int(m.group(2)))

    def _wcrevs(self):
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName('entry')
        lastrev, rev = '0', '0'
        if entries:
            rev = str(entries[0].getAttribute('revision')) or '0'
            commits = entries[0].getElementsByTagName('commit')
            if commits:
                lastrev = str(commits[0].getAttribute('revision')) or '0'
        return (lastrev, rev)

    def _wcrev(self):
        """Last-commit revision of the working copy."""
        return self._wcrevs()[0]

    def _wcchanged(self):
        """Return (changes, extchanges) where changes is True
        if the working directory was changed, and extchanges is
        True if any of these changes concern an external entry.
        """
        output, err = self._svncommand(['status', '--xml'])
        externals, changes = [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
                continue
            item = s[0].getAttribute('item')
            props = s[0].getAttribute('props')
            path = e.getAttribute('path')
            if item == 'external':
                externals.append(path)
            if (item not in ('', 'normal', 'unversioned', 'external')
                or props not in ('', 'none')):
                changes.append(path)
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + os.sep):
                    return True, True
        return bool(changes), False

    def dirty(self, ignoreupdate=False):
        """True when the working copy has modifications, or (unless
        ignoreupdate) is at a revision other than the recorded one."""
        if not self._wcchanged()[0]:
            if self._state[1] in self._wcrevs() or ignoreupdate:
                return False
        return True

    def commit(self, text, user, date):
        # user and date are out of our hands since svn is centralized
        changed, extchanged = self._wcchanged()
        if not changed:
            return self._wcrev()
        if extchanged:
            # Do not try to commit externals
            raise util.Abort(_('cannot commit svn externals'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self._ui.status(commitinfo)
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            raise util.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        self._ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev

    def remove(self):
        """Delete the subrepo checkout, refusing if it has changes."""
        if self.dirty():
            # translate first, then interpolate: %-formatting inside _()
            # would make the gettext lookup use the formatted string
            self._ui.warn(_('not removing repo %s because '
                            'it has changes.\n') % self._path)
            return
        self._ui.note(_('removing subrepo %s\n') % self._path)

        def onerror(function, path, excinfo):
            if function is not os.remove:
                raise
            # read-only files cannot be unlinked under Windows
            s = os.stat(path)
            if (s.st_mode & stat.S_IWRITE) != 0:
                raise
            os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
            os.remove(path)

        path = self._ctx._repo.wjoin(self._path)
        shutil.rmtree(path, onerror=onerror)
        try:
            # clean up any now-empty parent directories
            os.removedirs(os.path.dirname(path))
        except OSError:
            pass

    def get(self, state, overwrite=False):
        """Check out state = (URL, revision, kind), recovering from a
        clean obstructed working copy by removing and retrying."""
        if overwrite:
            self._svncommand(['revert', '--recursive'])
        args = ['checkout']
        if self._svnversion >= (1, 5):
            args.append('--force')
        args.extend([state[0], '--revision', state[1]])
        status, err = self._svncommand(args, failok=True)
        if not re.search('Checked out revision [0-9]+.', status):
            if ('is already a working copy for a different URL' in err
                and (self._wcchanged() == (False, False))):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise util.Abort((status or err).splitlines()[-1])
        self._ui.status(status)

    def merge(self, state):
        old = self._state[1]
        new = state[1]
        if new != self._wcrev():
            dirty = old == self._wcrev() or self._wcchanged()[0]
            if _updateprompt(self._ui, self, dirty, self._wcrev(), new):
                self.get(state, False)

    def push(self, force):
        # push is a no-op for SVN
        return True

    def files(self):
        # _svncommand returns (stdout, stderr); keep only the listing,
        # otherwise splitlines() would be called on the tuple
        output = self._svncommand(['list'])[0]
        # This works because svn forbids \n in filenames.
        return output.splitlines()

    def filedata(self, name):
        # return the file contents only, not the (stdout, stderr) pair
        return self._svncommand(['cat'], name)[0]
697
697
698
698
699 class gitsubrepo(abstractsubrepo):
699 class gitsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
    # TODO add git version check.
    self._ctx = ctx
    self._path = path
    self._state = state
    # presumably the subrepo path relative to the outermost repo,
    # used for user-facing messages -- depends on reporelpath()
    self._relpath = os.path.join(reporelpath(ctx._repo), path)
    # absolute filesystem path of the checkout
    self._abspath = ctx._repo.wjoin(path)
    self._subparent = ctx._repo
    self._ui = ctx._repo.ui
709
709
710 def _gitcommand(self, commands, env=None, stream=False):
710 def _gitcommand(self, commands, env=None, stream=False):
711 return self._gitdir(commands, env=env, stream=stream)[0]
711 return self._gitdir(commands, env=env, stream=stream)[0]
712
712
713 def _gitdir(self, commands, env=None, stream=False):
713 def _gitdir(self, commands, env=None, stream=False):
714 return self._gitnodir(commands, env=env, stream=stream,
714 return self._gitnodir(commands, env=env, stream=stream,
715 cwd=self._abspath)
715 cwd=self._abspath)
716
716
def _gitnodir(self, commands, env=None, stream=False, cwd=None):
    """Run the git command and return (stdout, returncode).

    Versions prior to git 1.6.0 are not supported and very probably
    fail.  When *stream* is true, (stdout-pipe, None) is returned
    without waiting for the child to finish.
    """
    self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
    # unless ui.quiet is set, print git's stderr,
    # which is mostly progress and useful info
    errpipe = None
    if self._ui.quiet:
        errpipe = open(os.devnull, 'w')
    p = subprocess.Popen(['git'] + commands, bufsize=-1, cwd=cwd, env=env,
                         close_fds=util.closefds,
                         stdout=subprocess.PIPE, stderr=errpipe)
    if stream:
        return p.stdout, None

    retdata = p.stdout.read().strip()
    # wait for the child to exit to avoid race condition.
    p.wait()

    if p.returncode not in (0, 1):
        # there are certain error codes that are ok
        command = commands[0]
        if command in ('cat-file', 'symbolic-ref'):
            return retdata, p.returncode
        # for all others, abort
        raise util.Abort('git %s error %d in %s' %
                         (command, p.returncode, self._relpath))

    return retdata, p.returncode
749
749
750 def _gitmissing(self):
750 def _gitmissing(self):
751 return not os.path.exists(os.path.join(self._abspath, '.git'))
751 return not os.path.exists(os.path.join(self._abspath, '.git'))
752
752
753 def _gitstate(self):
753 def _gitstate(self):
754 return self._gitcommand(['rev-parse', 'HEAD'])
754 return self._gitcommand(['rev-parse', 'HEAD'])
755
755
756 def _gitcurrentbranch(self):
756 def _gitcurrentbranch(self):
757 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
757 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
758 if err:
758 if err:
759 current = None
759 current = None
760 return current
760 return current
761
761
762 def _gitremote(self, remote):
762 def _gitremote(self, remote):
763 out = self._gitcommand(['remote', 'show', '-n', remote])
763 out = self._gitcommand(['remote', 'show', '-n', remote])
764 line = out.split('\n')[1]
764 line = out.split('\n')[1]
765 i = line.index('URL: ') + len('URL: ')
765 i = line.index('URL: ') + len('URL: ')
766 return line[i:]
766 return line[i:]
767
767
768 def _githavelocally(self, revision):
768 def _githavelocally(self, revision):
769 out, code = self._gitdir(['cat-file', '-e', revision])
769 out, code = self._gitdir(['cat-file', '-e', revision])
770 return code == 0
770 return code == 0
771
771
772 def _gitisancestor(self, r1, r2):
772 def _gitisancestor(self, r1, r2):
773 base = self._gitcommand(['merge-base', r1, r2])
773 base = self._gitcommand(['merge-base', r1, r2])
774 return base == r1
774 return base == r1
775
775
776 def _gitisbare(self):
776 def _gitisbare(self):
777 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
777 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
778
778
779 def _gitbranchmap(self):
779 def _gitbranchmap(self):
780 '''returns 2 things:
780 '''returns 2 things:
781 a map from git branch to revision
781 a map from git branch to revision
782 a map from revision to branches'''
782 a map from revision to branches'''
783 branch2rev = {}
783 branch2rev = {}
784 rev2branch = {}
784 rev2branch = {}
785
785
786 out = self._gitcommand(['for-each-ref', '--format',
786 out = self._gitcommand(['for-each-ref', '--format',
787 '%(objectname) %(refname)'])
787 '%(objectname) %(refname)'])
788 for line in out.split('\n'):
788 for line in out.split('\n'):
789 revision, ref = line.split(' ')
789 revision, ref = line.split(' ')
790 if (not ref.startswith('refs/heads/') and
790 if (not ref.startswith('refs/heads/') and
791 not ref.startswith('refs/remotes/')):
791 not ref.startswith('refs/remotes/')):
792 continue
792 continue
793 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
793 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
794 continue # ignore remote/HEAD redirects
794 continue # ignore remote/HEAD redirects
795 branch2rev[ref] = revision
795 branch2rev[ref] = revision
796 rev2branch.setdefault(revision, []).append(ref)
796 rev2branch.setdefault(revision, []).append(ref)
797 return branch2rev, rev2branch
797 return branch2rev, rev2branch
798
798
799 def _gittracking(self, branches):
799 def _gittracking(self, branches):
800 'return map of remote branch to local tracking branch'
800 'return map of remote branch to local tracking branch'
801 # assumes no more than one local tracking branch for each remote
801 # assumes no more than one local tracking branch for each remote
802 tracking = {}
802 tracking = {}
803 for b in branches:
803 for b in branches:
804 if b.startswith('refs/remotes/'):
804 if b.startswith('refs/remotes/'):
805 continue
805 continue
806 remote = self._gitcommand(['config', 'branch.%s.remote' % b])
806 remote = self._gitcommand(['config', 'branch.%s.remote' % b])
807 if remote:
807 if remote:
808 ref = self._gitcommand(['config', 'branch.%s.merge' % b])
808 ref = self._gitcommand(['config', 'branch.%s.merge' % b])
809 tracking['refs/remotes/%s/%s' %
809 tracking['refs/remotes/%s/%s' %
810 (remote, ref.split('/', 2)[2])] = b
810 (remote, ref.split('/', 2)[2])] = b
811 return tracking
811 return tracking
812
812
813 def _abssource(self, source):
813 def _abssource(self, source):
814 if '://' not in source:
814 if '://' not in source:
815 # recognize the scp syntax as an absolute source
815 # recognize the scp syntax as an absolute source
816 colon = source.find(':')
816 colon = source.find(':')
817 if colon != -1 and '/' not in source[:colon]:
817 if colon != -1 and '/' not in source[:colon]:
818 return source
818 return source
819 self._subsource = source
819 self._subsource = source
820 return _abssource(self)
820 return _abssource(self)
821
821
822 def _fetch(self, source, revision):
822 def _fetch(self, source, revision):
823 if self._gitmissing():
823 if self._gitmissing():
824 source = self._abssource(source)
824 source = self._abssource(source)
825 self._ui.status(_('cloning subrepo %s from %s\n') %
825 self._ui.status(_('cloning subrepo %s from %s\n') %
826 (self._relpath, source))
826 (self._relpath, source))
827 self._gitnodir(['clone', source, self._abspath])
827 self._gitnodir(['clone', source, self._abspath])
828 if self._githavelocally(revision):
828 if self._githavelocally(revision):
829 return
829 return
830 self._ui.status(_('pulling subrepo %s from %s\n') %
830 self._ui.status(_('pulling subrepo %s from %s\n') %
831 (self._relpath, self._gitremote('origin')))
831 (self._relpath, self._gitremote('origin')))
832 # try only origin: the originally cloned repo
832 # try only origin: the originally cloned repo
833 self._gitcommand(['fetch'])
833 self._gitcommand(['fetch'])
834 if not self._githavelocally(revision):
834 if not self._githavelocally(revision):
835 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
835 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
836 (revision, self._relpath))
836 (revision, self._relpath))
837
837
def dirty(self, ignoreupdate=False):
    """Report whether the checkout differs from its recorded state."""
    if self._gitmissing():
        # a missing checkout is only dirty if a revision was recorded
        return self._state[1] != ''
    if self._gitisbare():
        return True
    if not ignoreupdate and self._state[1] != self._gitstate():
        # different version checked out
        return True
    # check for staged changes or modified files; ignore untracked files
    output, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
    return code == 1
849
849
def get(self, state, overwrite=False):
    """Update the checkout to state = (source, revision, kind).

    Fetches as needed, then checks out *revision*, preferring (in
    order) the current position, a local branch at the revision, a
    tracked remote branch, and finally a detached HEAD.  With
    *overwrite*, local modifications are discarded first.
    """
    source, revision, kind = state
    if not revision:
        self.remove()
        return
    self._fetch(source, revision)
    # if the repo was set to be bare, unbare it
    if self._gitisbare():
        self._gitcommand(['config', 'core.bare', 'false'])
        if self._gitstate() == revision:
            self._gitcommand(['reset', '--hard', 'HEAD'])
            return
    elif self._gitstate() == revision:
        if overwrite:
            # first reset the index to unmark new files for commit, because
            # reset --hard will otherwise throw away files added for commit,
            # not just unmark them.
            self._gitcommand(['reset', 'HEAD'])
        self._gitcommand(['reset', '--hard', 'HEAD'])
        return
    branch2rev, rev2branch = self._gitbranchmap()

    def docheckout(args):
        cmd = ['checkout']
        if overwrite:
            # first reset the index to unmark new files for commit, because
            # the -f option will otherwise throw away files added for
            # commit, not just unmark them.
            self._gitcommand(['reset', 'HEAD'])
            cmd.append('-f')
        self._gitcommand(cmd + args)

    def detachedcheckout():
        # no branch to checkout, check it out with no branch
        self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
                      self._relpath)
        self._ui.warn(_('check out a git branch if you intend '
                        'to make changes\n'))
        docheckout(['-q', revision])

    if revision not in rev2branch:
        detachedcheckout()
        return

    branches = rev2branch[revision]
    firstlocal = None
    for ref in branches:
        if ref == 'refs/heads/master':
            # master trumps all other branches
            docheckout(['refs/heads/master'])
            return
        if not firstlocal and not ref.startswith('refs/remotes/'):
            firstlocal = ref
    if firstlocal:
        docheckout([firstlocal])
        return

    tracking = self._gittracking(branch2rev.keys())
    # choose a remote branch already tracked if possible
    remote = branches[0]
    if remote not in tracking:
        for ref in branches:
            if ref in tracking:
                remote = ref
                break

    if remote not in tracking:
        # create a new local tracking branch
        local = remote.split('/', 2)[2]
        docheckout(['-b', local, remote])
    elif self._gitisancestor(branch2rev[tracking[remote]], remote):
        # When updating to a tracked remote branch,
        # if the local tracking branch is downstream of it,
        # a normal `git pull` would have performed a "fast-forward merge"
        # which is equivalent to updating the local branch to the remote.
        # Since we are only looking at branching at update, we need to
        # detect this situation and perform this action lazily.
        if tracking[remote] != self._gitcurrentbranch():
            docheckout([tracking[remote]])
        self._gitcommand(['merge', '--ff', remote])
    else:
        # a real merge would be required, just checkout the revision
        detachedcheckout()
932
932
def commit(self, text, user, date):
    """Commit all changes in the subrepo; return the new HEAD hash."""
    if self._gitmissing():
        raise util.Abort(_("subrepo %s is missing") % self._relpath)
    cmd = ['commit', '-a', '-m', text]
    env = os.environ.copy()
    if user:
        cmd.extend(['--author', user])
    if date:
        # git's date parser silently ignores when seconds < 1e9
        # convert to ISO8601
        env['GIT_AUTHOR_DATE'] = util.datestr(date,
                                              '%Y-%m-%dT%H:%M:%S %1%2')
    self._gitcommand(cmd, env=env)
    # make sure commit works otherwise HEAD might not exist under certain
    # circumstances
    return self._gitstate()
949
949
def merge(self, state):
    """Merge the recorded revision of *state* into the checkout,
    prompting first when the working copy is dirty."""
    source, revision, kind = state
    self._fetch(source, revision)
    base = self._gitcommand(['merge-base', revision, self._state[1]])
    out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])

    def domerge():
        if base == revision:
            self.get(state) # fast forward merge
        elif base != self._state[1]:
            self._gitcommand(['merge', '--no-commit', revision])

    if not self.dirty():
        domerge()
    elif self._gitstate() != revision:
        dirty = self._gitstate() == self._state[1] or code != 0
        if _updateprompt(self._ui, self, dirty,
                         self._state[1][:7], revision[:7]):
            domerge()
970
970
971 def push(self, force):
971 def push(self, force):
972 if not self._state[1]:
972 if not self._state[1]:
973 return True
973 return True
974 if self._gitmissing():
974 if self._gitmissing():
975 raise util.Abort(_("subrepo %s is missing") % self._relpath)
975 raise util.Abort(_("subrepo %s is missing") % self._relpath)
976 # if a branch in origin contains the revision, nothing to do
976 # if a branch in origin contains the revision, nothing to do
977 branch2rev, rev2branch = self._gitbranchmap()
977 branch2rev, rev2branch = self._gitbranchmap()
978 if self._state[1] in rev2branch:
978 if self._state[1] in rev2branch:
979 for b in rev2branch[self._state[1]]:
979 for b in rev2branch[self._state[1]]:
980 if b.startswith('refs/remotes/origin/'):
980 if b.startswith('refs/remotes/origin/'):
981 return True
981 return True
982 for b, revision in branch2rev.iteritems():
982 for b, revision in branch2rev.iteritems():
983 if b.startswith('refs/remotes/origin/'):
983 if b.startswith('refs/remotes/origin/'):
984 if self._gitisancestor(self._state[1], revision):
984 if self._gitisancestor(self._state[1], revision):
985 return True
985 return True
986 # otherwise, try to push the currently checked out branch
986 # otherwise, try to push the currently checked out branch
987 cmd = ['push']
987 cmd = ['push']
988 if force:
988 if force:
989 cmd.append('--force')
989 cmd.append('--force')
990
990
991 current = self._gitcurrentbranch()
991 current = self._gitcurrentbranch()
992 if current:
992 if current:
993 # determine if the current branch is even useful
993 # determine if the current branch is even useful
994 if not self._gitisancestor(self._state[1], current):
994 if not self._gitisancestor(self._state[1], current):
995 self._ui.warn(_('unrelated git branch checked out '
995 self._ui.warn(_('unrelated git branch checked out '
996 'in subrepo %s\n') % self._relpath)
996 'in subrepo %s\n') % self._relpath)
997 return False
997 return False
998 self._ui.status(_('pushing branch %s of subrepo %s\n') %
998 self._ui.status(_('pushing branch %s of subrepo %s\n') %
999 (current.split('/', 2)[2], self._relpath))
999 (current.split('/', 2)[2], self._relpath))
1000 self._gitcommand(cmd + ['origin', current])
1000 self._gitcommand(cmd + ['origin', current])
1001 return True
1001 return True
1002 else:
1002 else:
1003 self._ui.warn(_('no branch checked out in subrepo %s\n'
1003 self._ui.warn(_('no branch checked out in subrepo %s\n'
1004 'cannot push revision %s') %
1004 'cannot push revision %s') %
1005 (self._relpath, self._state[1]))
1005 (self._relpath, self._state[1]))
1006 return False
1006 return False
1007
1007
1008 def remove(self):
1008 def remove(self):
1009 if self._gitmissing():
1009 if self._gitmissing():
1010 return
1010 return
1011 if self.dirty():
1011 if self.dirty():
1012 self._ui.warn(_('not removing repo %s because '
1012 self._ui.warn(_('not removing repo %s because '
1013 'it has changes.\n') % self._relpath)
1013 'it has changes.\n') % self._relpath)
1014 return
1014 return
1015 # we can't fully delete the repository as it may contain
1015 # we can't fully delete the repository as it may contain
1016 # local-only history
1016 # local-only history
1017 self._ui.note(_('removing subrepo %s\n') % self._relpath)
1017 self._ui.note(_('removing subrepo %s\n') % self._relpath)
1018 self._gitcommand(['config', 'core.bare', 'true'])
1018 self._gitcommand(['config', 'core.bare', 'true'])
1019 for f in os.listdir(self._abspath):
1019 for f in os.listdir(self._abspath):
1020 if f == '.git':
1020 if f == '.git':
1021 continue
1021 continue
1022 path = os.path.join(self._abspath, f)
1022 path = os.path.join(self._abspath, f)
1023 if os.path.isdir(path) and not os.path.islink(path):
1023 if os.path.isdir(path) and not os.path.islink(path):
1024 shutil.rmtree(path)
1024 shutil.rmtree(path)
1025 else:
1025 else:
1026 os.remove(path)
1026 os.remove(path)
1027
1027
1028 def archive(self, ui, archiver, prefix):
1028 def archive(self, ui, archiver, prefix):
1029 source, revision = self._state
1029 source, revision = self._state
1030 if not revision:
1030 if not revision:
1031 return
1031 return
1032 self._fetch(source, revision)
1032 self._fetch(source, revision)
1033
1033
1034 # Parse git's native archive command.
1034 # Parse git's native archive command.
1035 # This should be much faster than manually traversing the trees
1035 # This should be much faster than manually traversing the trees
1036 # and objects with many subprocess calls.
1036 # and objects with many subprocess calls.
1037 tarstream = self._gitcommand(['archive', revision], stream=True)
1037 tarstream = self._gitcommand(['archive', revision], stream=True)
1038 tar = tarfile.open(fileobj=tarstream, mode='r|')
1038 tar = tarfile.open(fileobj=tarstream, mode='r|')
1039 relpath = subrelpath(self)
1039 relpath = subrelpath(self)
1040 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1040 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1041 for i, info in enumerate(tar):
1041 for i, info in enumerate(tar):
1042 if info.isdir():
1042 if info.isdir():
1043 continue
1043 continue
1044 if info.issym():
1044 if info.issym():
1045 data = info.linkname
1045 data = info.linkname
1046 else:
1046 else:
1047 data = tar.extractfile(info).read()
1047 data = tar.extractfile(info).read()
1048 archiver.addfile(os.path.join(prefix, self._path, info.name),
1048 archiver.addfile(os.path.join(prefix, self._path, info.name),
1049 info.mode, info.issym(), data)
1049 info.mode, info.issym(), data)
1050 ui.progress(_('archiving (%s)') % relpath, i + 1,
1050 ui.progress(_('archiving (%s)') % relpath, i + 1,
1051 unit=_('files'))
1051 unit=_('files'))
1052 ui.progress(_('archiving (%s)') % relpath, None)
1052 ui.progress(_('archiving (%s)') % relpath, None)
1053
1053
1054
1054
1055 def status(self, rev2, **opts):
1055 def status(self, rev2, **opts):
1056 rev1 = self._state[1]
1056 rev1 = self._state[1]
1057 if self._gitmissing() or not rev1:
1057 if self._gitmissing() or not rev1:
1058 # if the repo is missing, return no results
1058 # if the repo is missing, return no results
1059 return [], [], [], [], [], [], []
1059 return [], [], [], [], [], [], []
1060 modified, added, removed = [], [], []
1060 modified, added, removed = [], [], []
1061 if rev2:
1061 if rev2:
1062 command = ['diff-tree', rev1, rev2]
1062 command = ['diff-tree', rev1, rev2]
1063 else:
1063 else:
1064 command = ['diff-index', rev1]
1064 command = ['diff-index', rev1]
1065 out = self._gitcommand(command)
1065 out = self._gitcommand(command)
1066 for line in out.split('\n'):
1066 for line in out.split('\n'):
1067 tab = line.find('\t')
1067 tab = line.find('\t')
1068 if tab == -1:
1068 if tab == -1:
1069 continue
1069 continue
1070 status, f = line[tab - 1], line[tab + 1:]
1070 status, f = line[tab - 1], line[tab + 1:]
1071 if status == 'M':
1071 if status == 'M':
1072 modified.append(f)
1072 modified.append(f)
1073 elif status == 'A':
1073 elif status == 'A':
1074 added.append(f)
1074 added.append(f)
1075 elif status == 'D':
1075 elif status == 'D':
1076 removed.append(f)
1076 removed.append(f)
1077
1077
1078 deleted = unknown = ignored = clean = []
1078 deleted = unknown = ignored = clean = []
1079 return modified, added, removed, deleted, unknown, ignored, clean
1079 return modified, added, removed, deleted, unknown, ignored, clean
1080
1080
# map a subrepo kind (the key recorded in .hgsub / .hgsubstate state
# tuples) to the class implementing that kind of subrepository
types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
    }
@@ -1,1601 +1,1612 b''
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, time, calendar, textwrap, unicodedata, signal
19 import os, time, calendar, textwrap, unicodedata, signal
20 import imp, socket, urllib
20 import imp, socket, urllib
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
import __builtin__

if sys.version_info[0] < 3:
    def fakebuffer(sliceable, offset=0):
        # Python 2: a copying slice stands in for buffer()
        return sliceable[offset:]
else:
    def fakebuffer(sliceable, offset=0):
        # Python 3: buffer() is gone; memoryview gives a zero-copy view
        return memoryview(sliceable)[offset:]
try:
    buffer
except NameError:
    # some builds lack the buffer builtin; install our substitute
    __builtin__.buffer = fakebuffer
51
51
import subprocess
# close_fds is only safe/supported when spawning children on POSIX;
# on Windows it conflicts with redirecting the standard handles
closefds = os.name == 'posix'
54
54
def popen2(cmd, env=None, newlines=False):
    """Run *cmd* through the shell; return its (stdin, stdout) pipes.

    bufsize=-1 lets the system pick the buffer size: the default of 0
    (unbuffered) performs very poorly on Mac OS X (python issue4194).
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
65
65
def popen3(cmd, env=None, newlines=False):
    """Run *cmd* through the shell; return (stdin, stdout, stderr) pipes.

    Same buffering rationale as popen2 (see python issue4194).
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr
74
74
def version():
    """Return version information if available."""
    try:
        import __version__
    except ImportError:
        # __version__.py is generated at build time; absent in a
        # source checkout
        return 'unknown'
    return __version__.version
82
82
# used by parsedate: formats are tried in order, so more specific
# patterns must come before their prefixes
defaultdateformats = (
    # ISO-ish date + time
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    # date only, various separators/orders
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    # weekday-prefixed forms
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S',  # GNU coreutils "/bin/date --rfc-2822"
    # month-name forms
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    # time only
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )

# additional coarse-grained formats accepted where a date range is
# meaningful (e.g. "date('2004')")
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
117
117
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keyword args
    memo = {}
    if func.func_code.co_argcount == 1:
        # single-argument fast path: we gain a small amount of time
        # because no tuple packing/unpacking is needed
        def wrapped(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def wrapped(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return wrapped
136
136
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    memo = {}
    order = []  # cached keys, least recently used first
    if func.func_code.co_argcount == 1:
        def wrapped(arg):
            if arg not in memo:
                # evict the oldest entry once more than 20 are cached
                if len(memo) > 20:
                    del memo[order.pop(0)]
                memo[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return memo[arg]
    else:
        def wrapped(*args):
            if args not in memo:
                if len(memo) > 20:
                    del memo[order.pop(0)]
                memo[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return memo[args]

    return wrapped
163
163
class propertycache(object):
    """Descriptor: compute an attribute once, then cache it on the instance.

    The value is stored under the attribute's own name, so the instance
    attribute shadows this descriptor on every subsequent access.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        setattr(obj, self.name, value)
        return value
172
172
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    stdout, stderr = proc.communicate(s)
    return stdout
179
179
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            # BUG FIX: explainexit() returns a (message, code) tuple;
            # only the message belongs in the error (system() does the
            # same with explainexit(rc)[0])
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)[0]))
        fp = open(outname, 'rb')
        r = fp.read()
        fp.close()
        return r
    finally:
        # best-effort cleanup of both temp files
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
216
216
# dispatch table for filter(): a command spelled "tempfile:CMD" or
# "pipe:CMD" selects the matching strategy; anything else defaults to
# pipefilter (see filter below)
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
221
221
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for prefix, fn in filtertable.iteritems():
        if cmd.startswith(prefix):
            # strip the "tempfile:"/"pipe:" prefix and surrounding space
            return fn(s, cmd[len(prefix):].lstrip())
    return pipefilter(s, cmd)
228
228
def binary(s):
    """return true if a string is binary data (contains a NUL byte)"""
    if not s:
        return False
    return '\0' in s
232
232
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        # position of the highest set bit (0 for x == 0)
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    pending = []
    size = 0
    for piece in source:
        pending.append(piece)
        size += len(piece)
        if size >= min:
            if min < max:
                # grow the threshold: double it, but jump at least to
                # the largest power of two not exceeding this chunk
                min = min << 1
                nmin = 1 << log2(size)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(pending)
            size = 0
            pending = []
    if pending:
        yield ''.join(pending)
263
263
# re-export so callers can raise util.Abort without importing error
Abort = error.Abort
265
265
def always(fn):
    """Matcher predicate that accepts every file."""
    return True
268
268
def never(fn):
    """Matcher predicate that rejects every file."""
    return False
271
271
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (Windows): no relative path exists
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    fromparts = splitpath(n1)
    toparts = n2.split('/')
    # strip the common prefix, comparing from the front
    fromparts.reverse()
    toparts.reverse()
    while fromparts and toparts and fromparts[-1] == toparts[-1]:
        fromparts.pop()
        toparts.pop()
    toparts.reverse()
    # climb out of what remains of n1, then descend into n2
    return os.sep.join(['..'] * len(fromparts) + toparts) or '.'
297
297
# cached path of the 'hg' executable; resolved lazily by hgexecutable()
_hgexecutable = None
299
299
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if hasattr(sys, "frozen"):       # new py2exe
        return True
    if hasattr(sys, "importers"):    # old py2exe
        return True
    return imp.is_frozen("__main__")  # tools/freeze
309
309
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        # resolve once and cache via _sethgexecutable
        hg = os.environ.get('HG')
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            _sethgexecutable(sys.executable)
        else:
            _sethgexecutable(findexe('hg') or os.path.basename(sys.argv[0]))
    return _hgexecutable
325
325
326 def _sethgexecutable(path):
326 def _sethgexecutable(path):
327 """set location of the 'hg' executable"""
327 """set location of the 'hg' executable"""
328 global _hgexecutable
328 global _hgexecutable
329 _hgexecutable = path
329 _hgexecutable = path
330
330
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    try:
        # keep our buffered output ahead of the child's
        sys.stdout.flush()
    except Exception:
        pass

    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)

    origcmd = cmd
    cmd = quotecommand(cmd)
    # child environment: ours, plus caller overrides shell-stringified
    env = dict(os.environ)
    env.update((k, py2shell(v)) for k, v in environ.iteritems())
    env['HG'] = hgexecutable()
    if out is None or out == sys.__stdout__:
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        # capture and forward output line by line to the given sink
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in proc.stdout:
            out.write(line)
        proc.wait()
        rc = proc.returncode
    if sys.platform == 'OpenVMS' and rc & 1:
        rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        try:
            # a ui-like object gets a warning; otherwise raise it
            onerr.warn(errmsg + '\n')
        except AttributeError:
            raise onerr(errmsg)
    return rc
380
380
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a traceback of depth one means the call itself failed
            # (bad signature); deeper means the error came from inside
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
392
392
def makedir(path, notindexed):
    """Create directory *path*.

    *notindexed* is a Windows-only indexing hint; this (POSIX) variant
    accepts and ignores it to keep the cross-platform signature.
    """
    os.mkdir(path)
395
395
def unlinkpath(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # prune any parent directories the removal left empty
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass
404
404
405 def copyfile(src, dest):
405 def copyfile(src, dest):
406 "copy a file, preserving mode and atime/mtime"
406 "copy a file, preserving mode and atime/mtime"
407 if os.path.islink(src):
407 if os.path.islink(src):
408 try:
408 try:
409 os.unlink(dest)
409 os.unlink(dest)
410 except OSError:
410 except OSError:
411 pass
411 pass
412 os.symlink(os.readlink(src), dest)
412 os.symlink(os.readlink(src), dest)
413 else:
413 else:
414 try:
414 try:
415 shutil.copyfile(src, dest)
415 shutil.copyfile(src, dest)
416 shutil.copymode(src, dest)
416 shutil.copymode(src, dest)
417 except shutil.Error, inst:
417 except shutil.Error, inst:
418 raise Abort(str(inst))
418 raise Abort(str(inst))
419
419
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""
    if hardlink is None:
        # default: hardlink only when src and dst share a device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    num = 0
    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            hardlink, n = copyfiles(os.path.join(src, name),
                                    os.path.join(dst, name), hardlink)
            num += n
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # linking failed: fall back to copies for the rest
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1

    return hardlink, num
447
447
# base filenames reserved by Windows (used by checkwinfilename)
_winreservednames = '''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
# characters that may not appear in a Windows filename
_winreservedchars = ':*?"<>|'
452 def checkwinfilename(path):
452 def checkwinfilename(path):
453 '''Check that the base-relative path is a valid filename on Windows.
453 '''Check that the base-relative path is a valid filename on Windows.
454 Returns None if the path is ok, or a UI string describing the problem.
454 Returns None if the path is ok, or a UI string describing the problem.
455
455
456 >>> checkwinfilename("just/a/normal/path")
456 >>> checkwinfilename("just/a/normal/path")
457 >>> checkwinfilename("foo/bar/con.xml")
457 >>> checkwinfilename("foo/bar/con.xml")
458 "filename contains 'con', which is reserved on Windows"
458 "filename contains 'con', which is reserved on Windows"
459 >>> checkwinfilename("foo/con.xml/bar")
459 >>> checkwinfilename("foo/con.xml/bar")
460 "filename contains 'con', which is reserved on Windows"
460 "filename contains 'con', which is reserved on Windows"
461 >>> checkwinfilename("foo/bar/xml.con")
461 >>> checkwinfilename("foo/bar/xml.con")
462 >>> checkwinfilename("foo/bar/AUX/bla.txt")
462 >>> checkwinfilename("foo/bar/AUX/bla.txt")
463 "filename contains 'AUX', which is reserved on Windows"
463 "filename contains 'AUX', which is reserved on Windows"
464 >>> checkwinfilename("foo/bar/bla:.txt")
464 >>> checkwinfilename("foo/bar/bla:.txt")
465 "filename contains ':', which is reserved on Windows"
465 "filename contains ':', which is reserved on Windows"
466 >>> checkwinfilename("foo/bar/b\07la.txt")
466 >>> checkwinfilename("foo/bar/b\07la.txt")
467 "filename contains '\\\\x07', which is invalid on Windows"
467 "filename contains '\\\\x07', which is invalid on Windows"
468 >>> checkwinfilename("foo/bar/bla ")
468 >>> checkwinfilename("foo/bar/bla ")
469 "filename ends with ' ', which is not allowed on Windows"
469 "filename ends with ' ', which is not allowed on Windows"
470 '''
470 '''
471 for n in path.replace('\\', '/').split('/'):
471 for n in path.replace('\\', '/').split('/'):
472 if not n:
472 if not n:
473 continue
473 continue
474 for c in n:
474 for c in n:
475 if c in _winreservedchars:
475 if c in _winreservedchars:
476 return _("filename contains '%s', which is reserved "
476 return _("filename contains '%s', which is reserved "
477 "on Windows") % c
477 "on Windows") % c
478 if ord(c) <= 31:
478 if ord(c) <= 31:
479 return _("filename contains %r, which is invalid "
479 return _("filename contains %r, which is invalid "
480 "on Windows") % c
480 "on Windows") % c
481 base = n.split('.')[0]
481 base = n.split('.')[0]
482 if base and base.lower() in _winreservednames:
482 if base and base.lower() in _winreservednames:
483 return _("filename contains '%s', which is reserved "
483 return _("filename contains '%s', which is reserved "
484 "on Windows") % base
484 "on Windows") % base
485 t = n[-1]
485 t = n[-1]
486 if t in '. ':
486 if t in '. ':
487 return _("filename ends with '%s', which is not allowed "
487 return _("filename ends with '%s', which is not allowed "
488 "on Windows") % t
488 "on Windows") % t
489
489
490 def lookupreg(key, name=None, scope=None):
490 def lookupreg(key, name=None, scope=None):
491 return None
491 return None
492
492
493 def hidewindow():
493 def hidewindow():
494 """Hide current shell window.
494 """Hide current shell window.
495
495
496 Used to hide the window opened when starting asynchronous
496 Used to hide the window opened when starting asynchronous
497 child process under Windows, unneeded on other systems.
497 child process under Windows, unneeded on other systems.
498 """
498 """
499 pass
499 pass
500
500
501 if os.name == 'nt':
501 if os.name == 'nt':
502 checkosfilename = checkwinfilename
502 checkosfilename = checkwinfilename
503 from windows import *
503 from windows import *
504 else:
504 else:
505 from posix import *
505 from posix import *
506
506
507 def makelock(info, pathname):
507 def makelock(info, pathname):
508 try:
508 try:
509 return os.symlink(info, pathname)
509 return os.symlink(info, pathname)
510 except OSError, why:
510 except OSError, why:
511 if why.errno == errno.EEXIST:
511 if why.errno == errno.EEXIST:
512 raise
512 raise
513 except AttributeError: # no symlink in os
513 except AttributeError: # no symlink in os
514 pass
514 pass
515
515
516 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
516 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
517 os.write(ld, info)
517 os.write(ld, info)
518 os.close(ld)
518 os.close(ld)
519
519
520 def readlock(pathname):
520 def readlock(pathname):
521 try:
521 try:
522 return os.readlink(pathname)
522 return os.readlink(pathname)
523 except OSError, why:
523 except OSError, why:
524 if why.errno not in (errno.EINVAL, errno.ENOSYS):
524 if why.errno not in (errno.EINVAL, errno.ENOSYS):
525 raise
525 raise
526 except AttributeError: # no symlink in os
526 except AttributeError: # no symlink in os
527 pass
527 pass
528 fp = posixfile(pathname)
528 fp = posixfile(pathname)
529 r = fp.read()
529 r = fp.read()
530 fp.close()
530 fp.close()
531 return r
531 return r
532
532
533 def fstat(fp):
533 def fstat(fp):
534 '''stat file object that may not have fileno method.'''
534 '''stat file object that may not have fileno method.'''
535 try:
535 try:
536 return os.fstat(fp.fileno())
536 return os.fstat(fp.fileno())
537 except AttributeError:
537 except AttributeError:
538 return os.stat(fp.name)
538 return os.stat(fp.name)
539
539
540 # File system features
540 # File system features
541
541
542 def checkcase(path):
542 def checkcase(path):
543 """
543 """
544 Check whether the given path is on a case-sensitive filesystem
544 Check whether the given path is on a case-sensitive filesystem
545
545
546 Requires a path (like /foo/.hg) ending with a foldable final
546 Requires a path (like /foo/.hg) ending with a foldable final
547 directory component.
547 directory component.
548 """
548 """
549 s1 = os.stat(path)
549 s1 = os.stat(path)
550 d, b = os.path.split(path)
550 d, b = os.path.split(path)
551 p2 = os.path.join(d, b.upper())
551 p2 = os.path.join(d, b.upper())
552 if path == p2:
552 if path == p2:
553 p2 = os.path.join(d, b.lower())
553 p2 = os.path.join(d, b.lower())
554 try:
554 try:
555 s2 = os.stat(p2)
555 s2 = os.stat(p2)
556 if s2 == s1:
556 if s2 == s1:
557 return False
557 return False
558 return True
558 return True
559 except OSError:
559 except OSError:
560 return True
560 return True
561
561
562 _fspathcache = {}
562 _fspathcache = {}
563 def fspath(name, root):
563 def fspath(name, root):
564 '''Get name in the case stored in the filesystem
564 '''Get name in the case stored in the filesystem
565
565
566 The name is either relative to root, or it is an absolute path starting
566 The name is either relative to root, or it is an absolute path starting
567 with root. Note that this function is unnecessary, and should not be
567 with root. Note that this function is unnecessary, and should not be
568 called, for case-sensitive filesystems (simply because it's expensive).
568 called, for case-sensitive filesystems (simply because it's expensive).
569 '''
569 '''
570 # If name is absolute, make it relative
570 # If name is absolute, make it relative
571 if name.lower().startswith(root.lower()):
571 if name.lower().startswith(root.lower()):
572 l = len(root)
572 l = len(root)
573 if name[l] == os.sep or name[l] == os.altsep:
573 if name[l] == os.sep or name[l] == os.altsep:
574 l = l + 1
574 l = l + 1
575 name = name[l:]
575 name = name[l:]
576
576
577 if not os.path.lexists(os.path.join(root, name)):
577 if not os.path.lexists(os.path.join(root, name)):
578 return None
578 return None
579
579
580 seps = os.sep
580 seps = os.sep
581 if os.altsep:
581 if os.altsep:
582 seps = seps + os.altsep
582 seps = seps + os.altsep
583 # Protect backslashes. This gets silly very quickly.
583 # Protect backslashes. This gets silly very quickly.
584 seps.replace('\\','\\\\')
584 seps.replace('\\','\\\\')
585 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
585 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
586 dir = os.path.normcase(os.path.normpath(root))
586 dir = os.path.normcase(os.path.normpath(root))
587 result = []
587 result = []
588 for part, sep in pattern.findall(name):
588 for part, sep in pattern.findall(name):
589 if sep:
589 if sep:
590 result.append(sep)
590 result.append(sep)
591 continue
591 continue
592
592
593 if dir not in _fspathcache:
593 if dir not in _fspathcache:
594 _fspathcache[dir] = os.listdir(dir)
594 _fspathcache[dir] = os.listdir(dir)
595 contents = _fspathcache[dir]
595 contents = _fspathcache[dir]
596
596
597 lpart = part.lower()
597 lpart = part.lower()
598 lenp = len(part)
598 lenp = len(part)
599 for n in contents:
599 for n in contents:
600 if lenp == len(n) and n.lower() == lpart:
600 if lenp == len(n) and n.lower() == lpart:
601 result.append(n)
601 result.append(n)
602 break
602 break
603 else:
603 else:
604 # Cannot happen, as the file exists!
604 # Cannot happen, as the file exists!
605 result.append(part)
605 result.append(part)
606 dir = os.path.join(dir, lpart)
606 dir = os.path.join(dir, lpart)
607
607
608 return ''.join(result)
608 return ''.join(result)
609
609
610 def checknlink(testfile):
610 def checknlink(testfile):
611 '''check whether hardlink count reporting works properly'''
611 '''check whether hardlink count reporting works properly'''
612
612
613 # testfile may be open, so we need a separate file for checking to
613 # testfile may be open, so we need a separate file for checking to
614 # work around issue2543 (or testfile may get lost on Samba shares)
614 # work around issue2543 (or testfile may get lost on Samba shares)
615 f1 = testfile + ".hgtmp1"
615 f1 = testfile + ".hgtmp1"
616 if os.path.lexists(f1):
616 if os.path.lexists(f1):
617 return False
617 return False
618 try:
618 try:
619 posixfile(f1, 'w').close()
619 posixfile(f1, 'w').close()
620 except IOError:
620 except IOError:
621 return False
621 return False
622
622
623 f2 = testfile + ".hgtmp2"
623 f2 = testfile + ".hgtmp2"
624 fd = None
624 fd = None
625 try:
625 try:
626 try:
626 try:
627 oslink(f1, f2)
627 oslink(f1, f2)
628 except OSError:
628 except OSError:
629 return False
629 return False
630
630
631 # nlinks() may behave differently for files on Windows shares if
631 # nlinks() may behave differently for files on Windows shares if
632 # the file is open.
632 # the file is open.
633 fd = posixfile(f2)
633 fd = posixfile(f2)
634 return nlinks(f2) > 1
634 return nlinks(f2) > 1
635 finally:
635 finally:
636 if fd is not None:
636 if fd is not None:
637 fd.close()
637 fd.close()
638 for f in (f1, f2):
638 for f in (f1, f2):
639 try:
639 try:
640 os.unlink(f)
640 os.unlink(f)
641 except OSError:
641 except OSError:
642 pass
642 pass
643
643
644 return False
644 return False
645
645
646 def endswithsep(path):
646 def endswithsep(path):
647 '''Check path ends with os.sep or os.altsep.'''
647 '''Check path ends with os.sep or os.altsep.'''
648 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
648 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
649
649
650 def splitpath(path):
650 def splitpath(path):
651 '''Split path by os.sep.
651 '''Split path by os.sep.
652 Note that this function does not use os.altsep because this is
652 Note that this function does not use os.altsep because this is
653 an alternative of simple "xxx.split(os.sep)".
653 an alternative of simple "xxx.split(os.sep)".
654 It is recommended to use os.path.normpath() before using this
654 It is recommended to use os.path.normpath() before using this
655 function if need.'''
655 function if need.'''
656 return path.split(os.sep)
656 return path.split(os.sep)
657
657
658 def gui():
658 def gui():
659 '''Are we running in a GUI?'''
659 '''Are we running in a GUI?'''
660 if sys.platform == 'darwin':
660 if sys.platform == 'darwin':
661 if 'SSH_CONNECTION' in os.environ:
661 if 'SSH_CONNECTION' in os.environ:
662 # handle SSH access to a box where the user is logged in
662 # handle SSH access to a box where the user is logged in
663 return False
663 return False
664 elif getattr(osutil, 'isgui', None):
664 elif getattr(osutil, 'isgui', None):
665 # check if a CoreGraphics session is available
665 # check if a CoreGraphics session is available
666 return osutil.isgui()
666 return osutil.isgui()
667 else:
667 else:
668 # pure build; use a safe default
668 # pure build; use a safe default
669 return True
669 return True
670 else:
670 else:
671 return os.name == "nt" or os.environ.get("DISPLAY")
671 return os.name == "nt" or os.environ.get("DISPLAY")
672
672
673 def mktempcopy(name, emptyok=False, createmode=None):
673 def mktempcopy(name, emptyok=False, createmode=None):
674 """Create a temporary file with the same contents from name
674 """Create a temporary file with the same contents from name
675
675
676 The permission bits are copied from the original file.
676 The permission bits are copied from the original file.
677
677
678 If the temporary file is going to be truncated immediately, you
678 If the temporary file is going to be truncated immediately, you
679 can use emptyok=True as an optimization.
679 can use emptyok=True as an optimization.
680
680
681 Returns the name of the temporary file.
681 Returns the name of the temporary file.
682 """
682 """
683 d, fn = os.path.split(name)
683 d, fn = os.path.split(name)
684 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
684 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
685 os.close(fd)
685 os.close(fd)
686 # Temporary files are created with mode 0600, which is usually not
686 # Temporary files are created with mode 0600, which is usually not
687 # what we want. If the original file already exists, just copy
687 # what we want. If the original file already exists, just copy
688 # its mode. Otherwise, manually obey umask.
688 # its mode. Otherwise, manually obey umask.
689 try:
689 try:
690 st_mode = os.lstat(name).st_mode & 0777
690 st_mode = os.lstat(name).st_mode & 0777
691 except OSError, inst:
691 except OSError, inst:
692 if inst.errno != errno.ENOENT:
692 if inst.errno != errno.ENOENT:
693 raise
693 raise
694 st_mode = createmode
694 st_mode = createmode
695 if st_mode is None:
695 if st_mode is None:
696 st_mode = ~umask
696 st_mode = ~umask
697 st_mode &= 0666
697 st_mode &= 0666
698 os.chmod(temp, st_mode)
698 os.chmod(temp, st_mode)
699 if emptyok:
699 if emptyok:
700 return temp
700 return temp
701 try:
701 try:
702 try:
702 try:
703 ifp = posixfile(name, "rb")
703 ifp = posixfile(name, "rb")
704 except IOError, inst:
704 except IOError, inst:
705 if inst.errno == errno.ENOENT:
705 if inst.errno == errno.ENOENT:
706 return temp
706 return temp
707 if not getattr(inst, 'filename', None):
707 if not getattr(inst, 'filename', None):
708 inst.filename = name
708 inst.filename = name
709 raise
709 raise
710 ofp = posixfile(temp, "wb")
710 ofp = posixfile(temp, "wb")
711 for chunk in filechunkiter(ifp):
711 for chunk in filechunkiter(ifp):
712 ofp.write(chunk)
712 ofp.write(chunk)
713 ifp.close()
713 ifp.close()
714 ofp.close()
714 ofp.close()
715 except:
715 except:
716 try: os.unlink(temp)
716 try: os.unlink(temp)
717 except: pass
717 except: pass
718 raise
718 raise
719 return temp
719 return temp
720
720
721 class atomictempfile(object):
721 class atomictempfile(object):
722 '''writeable file object that atomically updates a file
722 '''writeable file object that atomically updates a file
723
723
724 All writes will go to a temporary copy of the original file. Call
724 All writes will go to a temporary copy of the original file. Call
725 rename() when you are done writing, and atomictempfile will rename
725 rename() when you are done writing, and atomictempfile will rename
726 the temporary copy to the original name, making the changes visible.
726 the temporary copy to the original name, making the changes visible.
727
727
728 Unlike other file-like objects, close() discards your writes by
728 Unlike other file-like objects, close() discards your writes by
729 simply deleting the temporary file.
729 simply deleting the temporary file.
730 '''
730 '''
731 def __init__(self, name, mode='w+b', createmode=None):
731 def __init__(self, name, mode='w+b', createmode=None):
732 self.__name = name # permanent name
732 self.__name = name # permanent name
733 self._tempname = mktempcopy(name, emptyok=('w' in mode),
733 self._tempname = mktempcopy(name, emptyok=('w' in mode),
734 createmode=createmode)
734 createmode=createmode)
735 self._fp = posixfile(self._tempname, mode)
735 self._fp = posixfile(self._tempname, mode)
736
736
737 # delegated methods
737 # delegated methods
738 self.write = self._fp.write
738 self.write = self._fp.write
739 self.fileno = self._fp.fileno
739 self.fileno = self._fp.fileno
740
740
741 def rename(self):
741 def rename(self):
742 if not self._fp.closed:
742 if not self._fp.closed:
743 self._fp.close()
743 self._fp.close()
744 rename(self._tempname, localpath(self.__name))
744 rename(self._tempname, localpath(self.__name))
745
745
746 def close(self):
746 def close(self):
747 if not self._fp.closed:
747 if not self._fp.closed:
748 try:
748 try:
749 os.unlink(self._tempname)
749 os.unlink(self._tempname)
750 except OSError:
750 except OSError:
751 pass
751 pass
752 self._fp.close()
752 self._fp.close()
753
753
754 def __del__(self):
754 def __del__(self):
755 if hasattr(self, '_fp'): # constructor actually did something
755 if hasattr(self, '_fp'): # constructor actually did something
756 self.close()
756 self.close()
757
757
758 def makedirs(name, mode=None):
758 def makedirs(name, mode=None):
759 """recursive directory creation with parent mode inheritance"""
759 """recursive directory creation with parent mode inheritance"""
760 parent = os.path.abspath(os.path.dirname(name))
760 parent = os.path.abspath(os.path.dirname(name))
761 try:
761 try:
762 os.mkdir(name)
762 os.mkdir(name)
763 if mode is not None:
763 if mode is not None:
764 os.chmod(name, mode)
764 os.chmod(name, mode)
765 return
765 return
766 except OSError, err:
766 except OSError, err:
767 if err.errno == errno.EEXIST:
767 if err.errno == errno.EEXIST:
768 return
768 return
769 if not name or parent == name or err.errno != errno.ENOENT:
769 if not name or parent == name or err.errno != errno.ENOENT:
770 raise
770 raise
771 makedirs(parent, mode)
771 makedirs(parent, mode)
772 makedirs(name, mode)
772 makedirs(name, mode)
773
773
774 def readfile(path):
774 def readfile(path):
775 fp = open(path, 'rb')
775 fp = open(path, 'rb')
776 try:
776 try:
777 return fp.read()
777 return fp.read()
778 finally:
778 finally:
779 fp.close()
779 fp.close()
780
780
781 def writefile(path, text):
781 def writefile(path, text):
782 fp = open(path, 'wb')
782 fp = open(path, 'wb')
783 try:
783 try:
784 fp.write(text)
784 fp.write(text)
785 finally:
785 finally:
786 fp.close()
786 fp.close()
787
787
788 def appendfile(path, text):
788 def appendfile(path, text):
789 fp = open(path, 'ab')
789 fp = open(path, 'ab')
790 try:
790 try:
791 fp.write(text)
791 fp.write(text)
792 finally:
792 finally:
793 fp.close()
793 fp.close()
794
794
795 class chunkbuffer(object):
795 class chunkbuffer(object):
796 """Allow arbitrary sized chunks of data to be efficiently read from an
796 """Allow arbitrary sized chunks of data to be efficiently read from an
797 iterator over chunks of arbitrary size."""
797 iterator over chunks of arbitrary size."""
798
798
799 def __init__(self, in_iter):
799 def __init__(self, in_iter):
800 """in_iter is the iterator that's iterating over the input chunks.
800 """in_iter is the iterator that's iterating over the input chunks.
801 targetsize is how big a buffer to try to maintain."""
801 targetsize is how big a buffer to try to maintain."""
802 def splitbig(chunks):
802 def splitbig(chunks):
803 for chunk in chunks:
803 for chunk in chunks:
804 if len(chunk) > 2**20:
804 if len(chunk) > 2**20:
805 pos = 0
805 pos = 0
806 while pos < len(chunk):
806 while pos < len(chunk):
807 end = pos + 2 ** 18
807 end = pos + 2 ** 18
808 yield chunk[pos:end]
808 yield chunk[pos:end]
809 pos = end
809 pos = end
810 else:
810 else:
811 yield chunk
811 yield chunk
812 self.iter = splitbig(in_iter)
812 self.iter = splitbig(in_iter)
813 self._queue = []
813 self._queue = []
814
814
815 def read(self, l):
815 def read(self, l):
816 """Read L bytes of data from the iterator of chunks of data.
816 """Read L bytes of data from the iterator of chunks of data.
817 Returns less than L bytes if the iterator runs dry."""
817 Returns less than L bytes if the iterator runs dry."""
818 left = l
818 left = l
819 buf = ''
819 buf = ''
820 queue = self._queue
820 queue = self._queue
821 while left > 0:
821 while left > 0:
822 # refill the queue
822 # refill the queue
823 if not queue:
823 if not queue:
824 target = 2**18
824 target = 2**18
825 for chunk in self.iter:
825 for chunk in self.iter:
826 queue.append(chunk)
826 queue.append(chunk)
827 target -= len(chunk)
827 target -= len(chunk)
828 if target <= 0:
828 if target <= 0:
829 break
829 break
830 if not queue:
830 if not queue:
831 break
831 break
832
832
833 chunk = queue.pop(0)
833 chunk = queue.pop(0)
834 left -= len(chunk)
834 left -= len(chunk)
835 if left < 0:
835 if left < 0:
836 queue.insert(0, chunk[left:])
836 queue.insert(0, chunk[left:])
837 buf += chunk[:left]
837 buf += chunk[:left]
838 else:
838 else:
839 buf += chunk
839 buf += chunk
840
840
841 return buf
841 return buf
842
842
843 def filechunkiter(f, size=65536, limit=None):
843 def filechunkiter(f, size=65536, limit=None):
844 """Create a generator that produces the data in the file size
844 """Create a generator that produces the data in the file size
845 (default 65536) bytes at a time, up to optional limit (default is
845 (default 65536) bytes at a time, up to optional limit (default is
846 to read all data). Chunks may be less than size bytes if the
846 to read all data). Chunks may be less than size bytes if the
847 chunk is the last chunk in the file, or the file is a socket or
847 chunk is the last chunk in the file, or the file is a socket or
848 some other type of file that sometimes reads less data than is
848 some other type of file that sometimes reads less data than is
849 requested."""
849 requested."""
850 assert size >= 0
850 assert size >= 0
851 assert limit is None or limit >= 0
851 assert limit is None or limit >= 0
852 while True:
852 while True:
853 if limit is None:
853 if limit is None:
854 nbytes = size
854 nbytes = size
855 else:
855 else:
856 nbytes = min(limit, size)
856 nbytes = min(limit, size)
857 s = nbytes and f.read(nbytes)
857 s = nbytes and f.read(nbytes)
858 if not s:
858 if not s:
859 break
859 break
860 if limit:
860 if limit:
861 limit -= len(s)
861 limit -= len(s)
862 yield s
862 yield s
863
863
864 def makedate():
864 def makedate():
865 lt = time.localtime()
865 lt = time.localtime()
866 if lt[8] == 1 and time.daylight:
866 if lt[8] == 1 and time.daylight:
867 tz = time.altzone
867 tz = time.altzone
868 else:
868 else:
869 tz = time.timezone
869 tz = time.timezone
870 t = time.mktime(lt)
870 t = time.mktime(lt)
871 if t < 0:
871 if t < 0:
872 hint = _("check your clock")
872 hint = _("check your clock")
873 raise Abort(_("negative timestamp: %d") % t, hint=hint)
873 raise Abort(_("negative timestamp: %d") % t, hint=hint)
874 return t, tz
874 return t, tz
875
875
876 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
876 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
877 """represent a (unixtime, offset) tuple as a localized time.
877 """represent a (unixtime, offset) tuple as a localized time.
878 unixtime is seconds since the epoch, and offset is the time zone's
878 unixtime is seconds since the epoch, and offset is the time zone's
879 number of seconds away from UTC. if timezone is false, do not
879 number of seconds away from UTC. if timezone is false, do not
880 append time zone to string."""
880 append time zone to string."""
881 t, tz = date or makedate()
881 t, tz = date or makedate()
882 if t < 0:
882 if t < 0:
883 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
883 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
884 tz = 0
884 tz = 0
885 if "%1" in format or "%2" in format:
885 if "%1" in format or "%2" in format:
886 sign = (tz > 0) and "-" or "+"
886 sign = (tz > 0) and "-" or "+"
887 minutes = abs(tz) // 60
887 minutes = abs(tz) // 60
888 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
888 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
889 format = format.replace("%2", "%02d" % (minutes % 60))
889 format = format.replace("%2", "%02d" % (minutes % 60))
890 s = time.strftime(format, time.gmtime(float(t) - tz))
890 s = time.strftime(format, time.gmtime(float(t) - tz))
891 return s
891 return s
892
892
893 def shortdate(date=None):
893 def shortdate(date=None):
894 """turn (timestamp, tzoff) tuple into iso 8631 date."""
894 """turn (timestamp, tzoff) tuple into iso 8631 date."""
895 return datestr(date, format='%Y-%m-%d')
895 return datestr(date, format='%Y-%m-%d')
896
896
897 def strdate(string, format, defaults=[]):
897 def strdate(string, format, defaults=[]):
898 """parse a localized time string and return a (unixtime, offset) tuple.
898 """parse a localized time string and return a (unixtime, offset) tuple.
899 if the string cannot be parsed, ValueError is raised."""
899 if the string cannot be parsed, ValueError is raised."""
900 def timezone(string):
900 def timezone(string):
901 tz = string.split()[-1]
901 tz = string.split()[-1]
902 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
902 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
903 sign = (tz[0] == "+") and 1 or -1
903 sign = (tz[0] == "+") and 1 or -1
904 hours = int(tz[1:3])
904 hours = int(tz[1:3])
905 minutes = int(tz[3:5])
905 minutes = int(tz[3:5])
906 return -sign * (hours * 60 + minutes) * 60
906 return -sign * (hours * 60 + minutes) * 60
907 if tz == "GMT" or tz == "UTC":
907 if tz == "GMT" or tz == "UTC":
908 return 0
908 return 0
909 return None
909 return None
910
910
911 # NOTE: unixtime = localunixtime + offset
911 # NOTE: unixtime = localunixtime + offset
912 offset, date = timezone(string), string
912 offset, date = timezone(string), string
913 if offset is not None:
913 if offset is not None:
914 date = " ".join(string.split()[:-1])
914 date = " ".join(string.split()[:-1])
915
915
916 # add missing elements from defaults
916 # add missing elements from defaults
917 usenow = False # default to using biased defaults
917 usenow = False # default to using biased defaults
918 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
918 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
919 found = [True for p in part if ("%"+p) in format]
919 found = [True for p in part if ("%"+p) in format]
920 if not found:
920 if not found:
921 date += "@" + defaults[part][usenow]
921 date += "@" + defaults[part][usenow]
922 format += "@%" + part[0]
922 format += "@%" + part[0]
923 else:
923 else:
924 # We've found a specific time element, less specific time
924 # We've found a specific time element, less specific time
925 # elements are relative to today
925 # elements are relative to today
926 usenow = True
926 usenow = True
927
927
928 timetuple = time.strptime(date, format)
928 timetuple = time.strptime(date, format)
929 localunixtime = int(calendar.timegm(timetuple))
929 localunixtime = int(calendar.timegm(timetuple))
930 if offset is None:
930 if offset is None:
931 # local timezone
931 # local timezone
932 unixtime = int(time.mktime(timetuple))
932 unixtime = int(time.mktime(timetuple))
933 offset = unixtime - localunixtime
933 offset = unixtime - localunixtime
934 else:
934 else:
935 unixtime = localunixtime + offset
935 unixtime = localunixtime + offset
936 return unixtime, offset
936 return unixtime, offset
937
937
938 def parsedate(date, formats=None, bias={}):
938 def parsedate(date, formats=None, bias={}):
939 """parse a localized date/time and return a (unixtime, offset) tuple.
939 """parse a localized date/time and return a (unixtime, offset) tuple.
940
940
941 The date may be a "unixtime offset" string or in one of the specified
941 The date may be a "unixtime offset" string or in one of the specified
942 formats. If the date already is a (unixtime, offset) tuple, it is returned.
942 formats. If the date already is a (unixtime, offset) tuple, it is returned.
943 """
943 """
944 if not date:
944 if not date:
945 return 0, 0
945 return 0, 0
946 if isinstance(date, tuple) and len(date) == 2:
946 if isinstance(date, tuple) and len(date) == 2:
947 return date
947 return date
948 if not formats:
948 if not formats:
949 formats = defaultdateformats
949 formats = defaultdateformats
950 date = date.strip()
950 date = date.strip()
951 try:
951 try:
952 when, offset = map(int, date.split(' '))
952 when, offset = map(int, date.split(' '))
953 except ValueError:
953 except ValueError:
954 # fill out defaults
954 # fill out defaults
955 now = makedate()
955 now = makedate()
956 defaults = {}
956 defaults = {}
957 for part in ("d", "mb", "yY", "HI", "M", "S"):
957 for part in ("d", "mb", "yY", "HI", "M", "S"):
958 # this piece is for rounding the specific end of unknowns
958 # this piece is for rounding the specific end of unknowns
959 b = bias.get(part)
959 b = bias.get(part)
960 if b is None:
960 if b is None:
961 if part[0] in "HMS":
961 if part[0] in "HMS":
962 b = "00"
962 b = "00"
963 else:
963 else:
964 b = "0"
964 b = "0"
965
965
966 # this piece is for matching the generic end to today's date
966 # this piece is for matching the generic end to today's date
967 n = datestr(now, "%" + part[0])
967 n = datestr(now, "%" + part[0])
968
968
969 defaults[part] = (b, n)
969 defaults[part] = (b, n)
970
970
971 for format in formats:
971 for format in formats:
972 try:
972 try:
973 when, offset = strdate(date, format, defaults)
973 when, offset = strdate(date, format, defaults)
974 except (ValueError, OverflowError):
974 except (ValueError, OverflowError):
975 pass
975 pass
976 else:
976 else:
977 break
977 break
978 else:
978 else:
979 raise Abort(_('invalid date: %r') % date)
979 raise Abort(_('invalid date: %r') % date)
980 # validate explicit (probably user-specified) date and
980 # validate explicit (probably user-specified) date and
981 # time zone offset. values must fit in signed 32 bits for
981 # time zone offset. values must fit in signed 32 bits for
982 # current 32-bit linux runtimes. timezones go from UTC-12
982 # current 32-bit linux runtimes. timezones go from UTC-12
983 # to UTC+14
983 # to UTC+14
984 if abs(when) > 0x7fffffff:
984 if abs(when) > 0x7fffffff:
985 raise Abort(_('date exceeds 32 bits: %d') % when)
985 raise Abort(_('date exceeds 32 bits: %d') % when)
986 if when < 0:
986 if when < 0:
987 raise Abort(_('negative date value: %d') % when)
987 raise Abort(_('negative date value: %d') % when)
988 if offset < -50400 or offset > 43200:
988 if offset < -50400 or offset > 43200:
989 raise Abort(_('impossible time zone offset: %d') % offset)
989 raise Abort(_('impossible time zone offset: %d') % offset)
990 return when, offset
990 return when, offset
991
991
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # earliest timestamp a (possibly partial) date spec can mean
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest timestamp a (possibly partial) date spec can mean:
        # try the longest month lengths first, falling back to 28
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            # parsedate raises Abort for impossible dates (e.g. Feb 31);
            # catch only the expected failures — a bare except: would also
            # swallow KeyboardInterrupt and SystemExit.
            except (ValueError, OverflowError, Abort):
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        # on or before: compare against the latest instant of the spec
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        # on or after: compare against the earliest instant of the spec
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # '-N': within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # inclusive range between two specs
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # single spec: match anywhere within its span of uncertainty
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1067
1067
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # Drop the mail domain ('joe@example.com' -> 'joe').
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    # Drop a leading real-name part ('Joe User <joe' -> 'joe').
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    # Keep only the first word, then only the part before the first dot.
    for sep in (' ', '.'):
        pos = user.find(sep)
        if pos >= 0:
            user = user[:pos]
    return user
1083
1083
def email(author):
    '''get email of author.'''
    close = author.find('>')
    if close == -1:
        # no closing bracket: take everything to the end of the string
        close = None
    # find('<') yields -1 when absent; -1 + 1 == 0, i.e. start of string
    return author[author.find('<') + 1:close]
1090
1090
1091 def _ellipsis(text, maxlength):
1091 def _ellipsis(text, maxlength):
1092 if len(text) <= maxlength:
1092 if len(text) <= maxlength:
1093 return text, False
1093 return text, False
1094 else:
1094 else:
1095 return "%s..." % (text[:maxlength - 3]), True
1095 return "%s..." % (text[:maxlength - 3]), True
1096
1096
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # Decode first so we never cut an encoded multi-byte sequence
        # in half; both decode and encode errors fall through below.
        utext, shortened = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if shortened:
            return utext.encode(encoding.encoding)
        return text
    except (UnicodeDecodeError, UnicodeEncodeError):
        # undecodable input: fall back to a plain byte-wise trim
        return _ellipsis(text, maxlength)[0]
1108
1108
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    # (threshold multiplier, divisor, i18n format), scanned largest to
    # smallest so the first matching unit wins; the format strings must
    # stay literal for gettext extraction.
    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for threshold, divisor, fmt in units:
        if nbytes >= divisor * threshold:
            return fmt % (nbytes / float(divisor))
    # smaller than every threshold (i.e. nbytes < 1): plain byte format
    return units[-1][2] % nbytes
1129
1129
def uirepr(s):
    """repr() for user-facing output.

    repr() doubles every backslash; collapse them again so Windows
    paths stay readable.
    """
    backslash = '\\'
    return repr(s).replace(backslash * 2, backslash)
1133
1133
# delay import of textwrap
def MBTextWrapper(**kwargs):
    # Factory that builds (once) and instantiates a TextWrapper subclass
    # aware of double-width terminal characters.
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for double-width characters.

        Some Asian characters use two terminal columns instead of one.
        A good example of this behavior can be seen with u'\u65e5\u672c',
        the two Japanese characters for "Japan":
        len() returns 2, but when printed to a terminal, they eat 4 columns.

        (Note that this has nothing to do whatsoever with unicode
        representation, or encoding of the underlying string)
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

        def _cutdown(self, str, space_left):
            # Split 'str' so the first piece occupies at most space_left
            # terminal columns, counting wide characters as two.
            l = 0
            ucstr = unicode(str, encoding.encoding)
            colwidth = unicodedata.east_asian_width
            for i in xrange(len(ucstr)):
                # W(ide), F(ullwidth) and A(mbiguous) take two columns
                l += colwidth(ucstr[i]) in 'WFA' and 2 or 1
                if space_left < l:
                    return (ucstr[:i].encode(encoding.encoding),
                            ucstr[i:].encode(encoding.encoding))
            # everything fits: no remainder
            return str, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            # Called when a single chunk is longer than the line; cut it
            # at a column boundary instead of a character boundary.
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

    # Cache the class by rebinding the module-level name, so later calls
    # skip the class definition entirely.
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
1176
1176
def wrap(line, width, initindent='', hangindent=''):
    """Word-wrap line to width columns, with separate indents for the
    first line (initindent) and continuation lines (hangindent)."""
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size: guarantee at least one
        # usable column beyond the indent
        width = max(78, maxindent + 1)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line)
1186
1186
def iterlines(iterator):
    """Yield the individual lines of every chunk in iterator."""
    return (line
            for chunk in iterator
            for line in chunk.splitlines())
1191
1191
def expandpath(path):
    """Expand environment variables, then a leading '~', in path."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
1194
1194
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    # frozen binary: sys.executable is the hg executable itself
    return [sys.executable]
1205
1205
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reap the child and record its (pid, status) tuple
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        # SIGCHLD only exists on Unix; on Windows we rely on polling alone
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # Re-check condfn() after detecting death: the condition may
            # have become true just before the child exited.
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # always restore the previous SIGCHLD disposition
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
1239
1239
# Python 2.5+ provides any()/all() as fast builtins; keep pure-Python
# fallbacks for 2.4.
try:
    any, all = any, all
except NameError:
    def any(iterable):
        # True if at least one element is truthy
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        # True only if every element is truthy (vacuously True when empty)
        for i in iterable:
            if not i:
                return False
        return True
1254
1254
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    if escape_prefix:
        # Registering the prefix-escape entry below must not mutate the
        # caller's dict, so work on a copy.
        mapping = mapping.copy()
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # also match a doubled prefix, mapped to a literal prefix char
        patterns += '|' + prefix
        if len(prefix) > 1:
            # regex-escaped prefix like r'\$': strip the backslash
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    # group() includes the prefix character; [1:] strips it before lookup
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1279
1279
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: fall back to a service-name lookup
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
1296
1296
# Recognized spellings of true/false in configuration values.
_booleans = dict(
    [(word, True) for word in ('1', 'yes', 'true', 'on', 'always')] +
    [(word, False) for word in ('0', 'no', 'false', 'off', 'never')])

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
1307
1307
1308 _hexdig = '0123456789ABCDEFabcdef'
1308 _hexdig = '0123456789ABCDEFabcdef'
1309 _hextochr = dict((a + b, chr(int(a + b, 16)))
1309 _hextochr = dict((a + b, chr(int(a + b, 16)))
1310 for a in _hexdig for b in _hexdig)
1310 for a in _hexdig for b in _hexdig)
1311
1311
1312 def _urlunquote(s):
1312 def _urlunquote(s):
1313 """unquote('abc%20def') -> 'abc def'."""
1313 """unquote('abc%20def') -> 'abc def'."""
1314 res = s.split('%')
1314 res = s.split('%')
1315 # fastpath
1315 # fastpath
1316 if len(res) == 1:
1316 if len(res) == 1:
1317 return s
1317 return s
1318 s = res[0]
1318 s = res[0]
1319 for item in res[1:]:
1319 for item in res[1:]:
1320 try:
1320 try:
1321 s += _hextochr[item[:2]] + item[2:]
1321 s += _hextochr[item[:2]] + item[2:]
1322 except KeyError:
1322 except KeyError:
1323 s += '%' + item
1323 s += '%' + item
1324 except UnicodeDecodeError:
1324 except UnicodeDecodeError:
1325 s += unichr(int(item[:2], 16)) + item[2:]
1325 s += unichr(int(item[:2], 16)) + item[2:]
1326 return s
1326 return s
1327
1327
1328 class url(object):
1328 class url(object):
1329 r"""Reliable URL parser.
1329 r"""Reliable URL parser.
1330
1330
1331 This parses URLs and provides attributes for the following
1331 This parses URLs and provides attributes for the following
1332 components:
1332 components:
1333
1333
1334 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1334 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1335
1335
1336 Missing components are set to None. The only exception is
1336 Missing components are set to None. The only exception is
1337 fragment, which is set to '' if present but empty.
1337 fragment, which is set to '' if present but empty.
1338
1338
1339 If parsefragment is False, fragment is included in query. If
1339 If parsefragment is False, fragment is included in query. If
1340 parsequery is False, query is included in path. If both are
1340 parsequery is False, query is included in path. If both are
1341 False, both fragment and query are included in path.
1341 False, both fragment and query are included in path.
1342
1342
1343 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1343 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1344
1344
1345 Note that for backward compatibility reasons, bundle URLs do not
1345 Note that for backward compatibility reasons, bundle URLs do not
1346 take host names. That means 'bundle://../' has a path of '../'.
1346 take host names. That means 'bundle://../' has a path of '../'.
1347
1347
1348 Examples:
1348 Examples:
1349
1349
1350 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1350 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1351 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1351 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1352 >>> url('ssh://[::1]:2200//home/joe/repo')
1352 >>> url('ssh://[::1]:2200//home/joe/repo')
1353 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1353 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1354 >>> url('file:///home/joe/repo')
1354 >>> url('file:///home/joe/repo')
1355 <url scheme: 'file', path: '/home/joe/repo'>
1355 <url scheme: 'file', path: '/home/joe/repo'>
1356 >>> url('bundle:foo')
1356 >>> url('bundle:foo')
1357 <url scheme: 'bundle', path: 'foo'>
1357 <url scheme: 'bundle', path: 'foo'>
1358 >>> url('bundle://../foo')
1358 >>> url('bundle://../foo')
1359 <url scheme: 'bundle', path: '../foo'>
1359 <url scheme: 'bundle', path: '../foo'>
1360 >>> url(r'c:\foo\bar')
1360 >>> url(r'c:\foo\bar')
1361 <url path: 'c:\\foo\\bar'>
1361 <url path: 'c:\\foo\\bar'>
1362 >>> url(r'\\blah\blah\blah')
1362 >>> url(r'\\blah\blah\blah')
1363 <url path: '\\\\blah\\blah\\blah'>
1363 <url path: '\\\\blah\\blah\\blah'>
1364
1364
1365 Authentication credentials:
1365 Authentication credentials:
1366
1366
1367 >>> url('ssh://joe:xyz@x/repo')
1367 >>> url('ssh://joe:xyz@x/repo')
1368 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1368 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1369 >>> url('ssh://joe@x/repo')
1369 >>> url('ssh://joe@x/repo')
1370 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1370 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1371
1371
1372 Query strings and fragments:
1372 Query strings and fragments:
1373
1373
1374 >>> url('http://host/a?b#c')
1374 >>> url('http://host/a?b#c')
1375 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1375 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1376 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1376 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1377 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1377 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1378 """
1378 """
1379
1379
1380 _safechars = "!~*'()+"
1380 _safechars = "!~*'()+"
1381 _safepchars = "/!~*'()+"
1381 _safepchars = "/!~*'()+"
1382 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1382 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1383
1383
    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True   # True until a scheme is recognized
        self._hostport = ''      # raw 'host:port' text before splitting
        self._origpath = path

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith(r'\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if parsefragment and '#' in path:
                path, self.fragment = path.split('#', 1)
                if not path:
                    path = None
            if self._localpath:
                # no scheme: everything (minus fragment) is the path
                self.path = path
                return

            if parsequery and '?' in path:
                path, self.query = path.split('?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith('//'):
                parts = path[2:].split('/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                    path = path
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                if path:
                    # path after an authority is absolute
                    path = '/' + path

            if self.host and '@' in self.host:
                # rsplit: user names may themselves contain '@'
                self.user, self.host = self.host.rsplit('@', 1)
                if ':' in self.user:
                    self.user, self.passwd = self.user.split(':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (self.host and ':' in self.host and
                not (self.host.startswith('[') and self.host.endswith(']'))):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(':', 1)
                if not self.host:
                    self.host = None

            if (self.host and self.scheme == 'file' and
                self.host not in ('localhost', '127.0.0.1', '[::1]')):
                raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # percent-decode every component once parsing is complete
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, _urlunquote(v))
1474
1474
1475 def __repr__(self):
1475 def __repr__(self):
1476 attrs = []
1476 attrs = []
1477 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1477 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1478 'query', 'fragment'):
1478 'query', 'fragment'):
1479 v = getattr(self, a)
1479 v = getattr(self, a)
1480 if v is not None:
1480 if v is not None:
1481 attrs.append('%s: %r' % (a, v))
1481 attrs.append('%s: %r' % (a, v))
1482 return '<url %s>' % ', '.join(attrs)
1482 return '<url %s>' % ', '.join(attrs)
1483
1483
    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/?foo#bar'))
        'http://user:pw@host:80/?foo#bar'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        """
        if self._localpath:
            # plain local path (optionally a bundle): no quoting at all
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')):
            # no authority but an absolute path (e.g. file:///tmp)
            s += '//'
        if self.user:
            s += urllib.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urllib.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urllib.quote(self.host)
            else:
                # bracketed IPv6 literal: emit verbatim
                s += self.host
        if self.port:
            s += ':' + urllib.quote(self.port)
        if self.host:
            # separator between authority and path
            s += '/'
        if self.path:
            s += urllib.quote(self.path, safe=self._safepchars)
        if self.query:
            s += '?' + urllib.quote(self.query, safe=self._safepchars)
        if self.fragment is not None:
            # '' fragment is preserved as a bare trailing '#'
            s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
        return s
1545
1545
1546 def authinfo(self):
1546 def authinfo(self):
1547 user, passwd = self.user, self.passwd
1547 user, passwd = self.user, self.passwd
1548 try:
1548 try:
1549 self.user, self.passwd = None, None
1549 self.user, self.passwd = None, None
1550 s = str(self)
1550 s = str(self)
1551 finally:
1551 finally:
1552 self.user, self.passwd = user, passwd
1552 self.user, self.passwd = user, passwd
1553 if not self.user:
1553 if not self.user:
1554 return (s, None)
1554 return (s, None)
1555 return (s, (None, (str(self), self.host),
1555 return (s, (None, (str(self), self.host),
1556 self.user, self.passwd or ''))
1556 self.user, self.passwd or ''))
1557
1557
1558 def isabs(self):
1559 if self.scheme and self.scheme != 'file':
1560 return True # remote URL
1561 if hasdriveletter(self.path):
1562 return True # absolute for our purposes - can't be joined()
1563 if self.path.startswith(r'\\'):
1564 return True # Windows UNC path
1565 if self.path.startswith('/'):
1566 return True # POSIX-style
1567 return False
1568
    def localpath(self):
        """Return the local filesystem path for 'file:' and 'bundle:'
        URLs; any other scheme is returned as originally given.
        """
        if self.scheme == 'file' or self.scheme == 'bundle':
            # an empty path means the filesystem root
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            # NOTE(review): assumes self._hostport is always a string
            # here (hasdriveletter slices it) - confirm against parser.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif self.host is not None and self.path:
                path = '/' + path
            # We also need to handle the case of file:///C:/, which
            # should return C:/, not /C:/.
            elif hasdriveletter(path):
                # Strip leading slash from paths with drive names
                return path[1:]
            return path
        # non-local scheme: hand back the string we were built from
        return self._origpath
1574
1585
def hasscheme(path):
    """Return True if *path* carries a URL scheme (e.g. 'http:', 'ssh:')."""
    parsed = url(path)
    return bool(parsed.scheme)
1577
1588
def hasdriveletter(path):
    """Return True if *path* starts with a Windows drive letter ('X:...')."""
    # slicing (not indexing) keeps this safe for strings shorter than 2
    first, second = path[0:1], path[1:2]
    return second == ':' and first.isalpha()
1580
1591
def localpath(path):
    """Return the local filesystem path for *path*, leaving any query or
    fragment text as part of the path instead of parsing it out."""
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
1583
1594
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # keep the URL shape but mask the secret
        parsed.passwd = '***'
    return str(parsed)
1590
1601
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = None
    parsed.passwd = None
    return str(parsed)
1596
1607
def isatty(fd):
    """Best-effort tty check: fd.isatty(), or False for objects that
    have no isatty() at all (e.g. StringIO-like or plain objects)."""
    try:
        result = fd.isatty()
    except AttributeError:
        return False
    return result
General Comments 0
You need to be logged in to leave comments. Login now