##// END OF EJS Templates
merge with stable
Matt Mackall -
r20356:ec5d4287 merge default
parent child Browse files
Show More
@@ -1,630 +1,631 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 from node import hex, nullid
11 from node import hex, nullid
12 import localrepo, bundlerepo, unionrepo, httppeer, sshpeer, statichttprepo
12 import localrepo, bundlerepo, unionrepo, httppeer, sshpeer, statichttprepo
13 import bookmarks, lock, util, extensions, error, node, scmutil, phases, url
13 import bookmarks, lock, util, extensions, error, node, scmutil, phases, url
14 import cmdutil, discovery
14 import cmdutil, discovery
15 import merge as mergemod
15 import merge as mergemod
16 import verify as verifymod
16 import verify as verifymod
17 import errno, os, shutil
17 import errno, os, shutil
18
18
def _local(path):
    """Return the repo module handling a local path.

    A path that is a plain file is assumed to be a bundle, so bundlerepo
    is returned for it; any other local path gets localrepo.
    """
    fspath = util.expandpath(util.urllocalpath(path))
    if os.path.isfile(fspath):
        return bundlerepo
    return localrepo
22
22
def addbranchrevs(lrepo, other, branches, revs):
    """Expand branch names into revisions against peer *other*.

    *branches* is a (hashbranch, branchlist) pair as produced by
    parseurl().  Returns (revs, checkout): the augmented revision list
    and the revision the caller should update to (or None).
    """
    peer = other.peer() # a courtesy to callers using a localrepo for other
    hashbranch, branchlist = branches
    if not hashbranch and not branchlist:
        return revs or None, revs and revs[0] or None
    revs = list(revs) if revs else []
    if not peer.capable('branchmap'):
        # old servers cannot resolve branch names; pass the raw name on
        if branchlist:
            raise util.Abort(_("remote branch lookup not supported"))
        revs.append(hashbranch)
        return revs, revs[0]
    bmap = peer.branchmap()

    def expand(branch):
        # resolve '.' via the local dirstate, then append every head of
        # the branch (tip-most first); False when the branch is unknown
        if branch == '.':
            if not lrepo:
                raise util.Abort(_("dirstate branch not accessible"))
            branch = lrepo.dirstate.branch()
        if branch not in bmap:
            return False
        revs.extend(node.hex(h) for h in reversed(bmap[branch]))
        return True

    for branch in branchlist:
        if not expand(branch):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch and not expand(hashbranch):
        # not a branch name after all: treat it as a bare revision
        revs.append(hashbranch)
    return revs, revs[0]
54
54
def parseurl(path, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''
    u = util.url(path)
    # the fragment, when present, names the branch to operate on
    fragment = u.fragment or None
    if fragment:
        u.fragment = None
    return str(u), (fragment, branches or [])
64
64
# map each supported URL scheme to the module (or factory callable, for
# 'file') responsible for opening repositories of that kind
schemes = {
    'bundle': bundlerepo,
    'union': unionrepo,
    'file': _local,
    'http': httppeer,
    'https': httppeer,
    'ssh': sshpeer,
    'static-http': statichttprepo,
}
74
74
def _peerlookup(path):
    """Return the schemes entry that handles *path*.

    Unknown schemes fall back to the 'file' handler.  Entries that are
    callables (like _local) are invoked with the path to choose a module.
    """
    scheme = util.url(path).scheme or 'file'
    handler = schemes.get(scheme) or schemes['file']
    try:
        return handler(path)
    except TypeError:
        # plain module entry, not a callable
        return handler
83
83
def islocal(repo):
    '''return true if repo (or path pointing to repo) is local'''
    if not isinstance(repo, str):
        return repo.local()
    try:
        return _peerlookup(repo).islocal(repo)
    except AttributeError:
        # handler has no islocal() -> remote-only scheme
        return False
92
92
def openpath(ui, path):
    '''open path with open if local, url.open if remote'''
    # parse without query/fragment so '#' and '?' in filenames survive
    u = util.url(path, parsequery=False, parsefragment=False)
    if not u.islocal():
        return url.open(ui, path)
    return util.posixfile(u.localpath(), 'rb')
99
100
def _peerorrepo(ui, path, create=False):
    """return a repository object for the specified path"""
    peerobj = _peerlookup(path).instance(ui, path, create)
    # prefer the object's own ui, if it carries one (repo-level config)
    ui = getattr(peerobj, "ui", ui)
    for _name, module in extensions.extensions(ui):
        reposetup = getattr(module, 'reposetup', None)
        if reposetup:
            reposetup(ui, peerobj)
    return peerobj
109
110
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    peerobj = _peerorrepo(ui, path, create)
    repo = peerobj.local()
    if not repo:
        raise util.Abort(_("repository '%s' is not local") %
                         (path or peerobj.url()))
    # hide changesets that should not be visible (e.g. obsolete ones)
    return repo.filtered('visible')
118
119
def peer(uiorrepo, opts, path, create=False):
    '''return a repository peer for the specified path'''
    remui = remoteui(uiorrepo, opts)
    return _peerorrepo(remui, path, create).peer()
123
124
def defaultdest(source):
    '''return default destination of clone if none is given'''
    urlpath = util.url(source).path or ''
    return os.path.basename(os.path.normpath(urlpath))
127
128
128 def share(ui, source, dest=None, update=True):
129 def share(ui, source, dest=None, update=True):
129 '''create a shared repository'''
130 '''create a shared repository'''
130
131
131 if not islocal(source):
132 if not islocal(source):
132 raise util.Abort(_('can only share local repositories'))
133 raise util.Abort(_('can only share local repositories'))
133
134
134 if not dest:
135 if not dest:
135 dest = defaultdest(source)
136 dest = defaultdest(source)
136 else:
137 else:
137 dest = ui.expandpath(dest)
138 dest = ui.expandpath(dest)
138
139
139 if isinstance(source, str):
140 if isinstance(source, str):
140 origsource = ui.expandpath(source)
141 origsource = ui.expandpath(source)
141 source, branches = parseurl(origsource)
142 source, branches = parseurl(origsource)
142 srcrepo = repository(ui, source)
143 srcrepo = repository(ui, source)
143 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
144 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
144 else:
145 else:
145 srcrepo = source.local()
146 srcrepo = source.local()
146 origsource = source = srcrepo.url()
147 origsource = source = srcrepo.url()
147 checkout = None
148 checkout = None
148
149
149 sharedpath = srcrepo.sharedpath # if our source is already sharing
150 sharedpath = srcrepo.sharedpath # if our source is already sharing
150
151
151 root = os.path.realpath(dest)
152 root = os.path.realpath(dest)
152 roothg = os.path.join(root, '.hg')
153 roothg = os.path.join(root, '.hg')
153
154
154 if os.path.exists(roothg):
155 if os.path.exists(roothg):
155 raise util.Abort(_('destination already exists'))
156 raise util.Abort(_('destination already exists'))
156
157
157 if not os.path.isdir(root):
158 if not os.path.isdir(root):
158 os.mkdir(root)
159 os.mkdir(root)
159 util.makedir(roothg, notindexed=True)
160 util.makedir(roothg, notindexed=True)
160
161
161 requirements = ''
162 requirements = ''
162 try:
163 try:
163 requirements = srcrepo.opener.read('requires')
164 requirements = srcrepo.opener.read('requires')
164 except IOError, inst:
165 except IOError, inst:
165 if inst.errno != errno.ENOENT:
166 if inst.errno != errno.ENOENT:
166 raise
167 raise
167
168
168 requirements += 'shared\n'
169 requirements += 'shared\n'
169 util.writefile(os.path.join(roothg, 'requires'), requirements)
170 util.writefile(os.path.join(roothg, 'requires'), requirements)
170 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
171 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
171
172
172 r = repository(ui, root)
173 r = repository(ui, root)
173
174
174 default = srcrepo.ui.config('paths', 'default')
175 default = srcrepo.ui.config('paths', 'default')
175 if default:
176 if default:
176 fp = r.opener("hgrc", "w", text=True)
177 fp = r.opener("hgrc", "w", text=True)
177 fp.write("[paths]\n")
178 fp.write("[paths]\n")
178 fp.write("default = %s\n" % default)
179 fp.write("default = %s\n" % default)
179 fp.close()
180 fp.close()
180
181
181 if update:
182 if update:
182 r.ui.status(_("updating working directory\n"))
183 r.ui.status(_("updating working directory\n"))
183 if update is not True:
184 if update is not True:
184 checkout = update
185 checkout = update
185 for test in (checkout, 'default', 'tip'):
186 for test in (checkout, 'default', 'tip'):
186 if test is None:
187 if test is None:
187 continue
188 continue
188 try:
189 try:
189 uprev = r.lookup(test)
190 uprev = r.lookup(test)
190 break
191 break
191 except error.RepoLookupError:
192 except error.RepoLookupError:
192 continue
193 continue
193 _update(r, uprev)
194 _update(r, uprev)
194
195
def copystore(ui, srcrepo, destpath):
    '''copy files from store of srcrepo in destpath

    returns destlock
    '''
    destlock = None
    try:
        hardlink = None
        total = 0
        srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
        srcvfs = scmutil.vfs(srcrepo.sharedpath)
        dstvfs = scmutil.vfs(destpath)
        for relpath in srcrepo.store.copylist():
            # a publishing repo has no secret phase data worth copying
            if srcpublishing and relpath.endswith('phaseroots'):
                continue
            dstbase = os.path.dirname(relpath)
            if dstbase and not dstvfs.exists(dstbase):
                dstvfs.mkdir(dstbase)
            if srcvfs.exists(relpath):
                if relpath.endswith('data'):
                    # lock to avoid premature writing to the target
                    destlock = lock.lock(dstvfs, dstbase + "/lock")
                hardlink, copied = util.copyfiles(srcvfs.join(relpath),
                                                  dstvfs.join(relpath),
                                                  hardlink)
                total += copied
        if hardlink:
            ui.debug("linked %d files\n" % total)
        else:
            ui.debug("copied %d files\n" % total)
        return destlock
    except: # re-raises
        release(destlock)
        raise
228
229
229 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
230 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
230 update=True, stream=False, branch=None):
231 update=True, stream=False, branch=None):
231 """Make a copy of an existing repository.
232 """Make a copy of an existing repository.
232
233
233 Create a copy of an existing repository in a new directory. The
234 Create a copy of an existing repository in a new directory. The
234 source and destination are URLs, as passed to the repository
235 source and destination are URLs, as passed to the repository
235 function. Returns a pair of repository peers, the source and
236 function. Returns a pair of repository peers, the source and
236 newly created destination.
237 newly created destination.
237
238
238 The location of the source is added to the new repository's
239 The location of the source is added to the new repository's
239 .hg/hgrc file, as the default to be used for future pulls and
240 .hg/hgrc file, as the default to be used for future pulls and
240 pushes.
241 pushes.
241
242
242 If an exception is raised, the partly cloned/updated destination
243 If an exception is raised, the partly cloned/updated destination
243 repository will be deleted.
244 repository will be deleted.
244
245
245 Arguments:
246 Arguments:
246
247
247 source: repository object or URL
248 source: repository object or URL
248
249
249 dest: URL of destination repository to create (defaults to base
250 dest: URL of destination repository to create (defaults to base
250 name of source repository)
251 name of source repository)
251
252
252 pull: always pull from source repository, even in local case
253 pull: always pull from source repository, even in local case
253
254
254 stream: stream raw data uncompressed from repository (fast over
255 stream: stream raw data uncompressed from repository (fast over
255 LAN, slow over WAN)
256 LAN, slow over WAN)
256
257
257 rev: revision to clone up to (implies pull=True)
258 rev: revision to clone up to (implies pull=True)
258
259
259 update: update working directory after clone completes, if
260 update: update working directory after clone completes, if
260 destination is local repository (True means update to default rev,
261 destination is local repository (True means update to default rev,
261 anything else is treated as a revision)
262 anything else is treated as a revision)
262
263
263 branch: branches to clone
264 branch: branches to clone
264 """
265 """
265
266
266 if isinstance(source, str):
267 if isinstance(source, str):
267 origsource = ui.expandpath(source)
268 origsource = ui.expandpath(source)
268 source, branch = parseurl(origsource, branch)
269 source, branch = parseurl(origsource, branch)
269 srcpeer = peer(ui, peeropts, source)
270 srcpeer = peer(ui, peeropts, source)
270 else:
271 else:
271 srcpeer = source.peer() # in case we were called with a localrepo
272 srcpeer = source.peer() # in case we were called with a localrepo
272 branch = (None, branch or [])
273 branch = (None, branch or [])
273 origsource = source = srcpeer.url()
274 origsource = source = srcpeer.url()
274 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
275 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
275
276
276 if dest is None:
277 if dest is None:
277 dest = defaultdest(source)
278 dest = defaultdest(source)
278 ui.status(_("destination directory: %s\n") % dest)
279 ui.status(_("destination directory: %s\n") % dest)
279 else:
280 else:
280 dest = ui.expandpath(dest)
281 dest = ui.expandpath(dest)
281
282
282 dest = util.urllocalpath(dest)
283 dest = util.urllocalpath(dest)
283 source = util.urllocalpath(source)
284 source = util.urllocalpath(source)
284
285
285 if not dest:
286 if not dest:
286 raise util.Abort(_("empty destination path is not valid"))
287 raise util.Abort(_("empty destination path is not valid"))
287 if os.path.exists(dest):
288 if os.path.exists(dest):
288 if not os.path.isdir(dest):
289 if not os.path.isdir(dest):
289 raise util.Abort(_("destination '%s' already exists") % dest)
290 raise util.Abort(_("destination '%s' already exists") % dest)
290 elif os.listdir(dest):
291 elif os.listdir(dest):
291 raise util.Abort(_("destination '%s' is not empty") % dest)
292 raise util.Abort(_("destination '%s' is not empty") % dest)
292
293
293 srclock = destlock = cleandir = None
294 srclock = destlock = cleandir = None
294 srcrepo = srcpeer.local()
295 srcrepo = srcpeer.local()
295 try:
296 try:
296 abspath = origsource
297 abspath = origsource
297 if islocal(origsource):
298 if islocal(origsource):
298 abspath = os.path.abspath(util.urllocalpath(origsource))
299 abspath = os.path.abspath(util.urllocalpath(origsource))
299
300
300 if islocal(dest):
301 if islocal(dest):
301 cleandir = dest
302 cleandir = dest
302
303
303 copy = False
304 copy = False
304 if (srcrepo and srcrepo.cancopy() and islocal(dest)
305 if (srcrepo and srcrepo.cancopy() and islocal(dest)
305 and not phases.hassecret(srcrepo)):
306 and not phases.hassecret(srcrepo)):
306 copy = not pull and not rev
307 copy = not pull and not rev
307
308
308 if copy:
309 if copy:
309 try:
310 try:
310 # we use a lock here because if we race with commit, we
311 # we use a lock here because if we race with commit, we
311 # can end up with extra data in the cloned revlogs that's
312 # can end up with extra data in the cloned revlogs that's
312 # not pointed to by changesets, thus causing verify to
313 # not pointed to by changesets, thus causing verify to
313 # fail
314 # fail
314 srclock = srcrepo.lock(wait=False)
315 srclock = srcrepo.lock(wait=False)
315 except error.LockError:
316 except error.LockError:
316 copy = False
317 copy = False
317
318
318 if copy:
319 if copy:
319 srcrepo.hook('preoutgoing', throw=True, source='clone')
320 srcrepo.hook('preoutgoing', throw=True, source='clone')
320 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
321 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
321 if not os.path.exists(dest):
322 if not os.path.exists(dest):
322 os.mkdir(dest)
323 os.mkdir(dest)
323 else:
324 else:
324 # only clean up directories we create ourselves
325 # only clean up directories we create ourselves
325 cleandir = hgdir
326 cleandir = hgdir
326 try:
327 try:
327 destpath = hgdir
328 destpath = hgdir
328 util.makedir(destpath, notindexed=True)
329 util.makedir(destpath, notindexed=True)
329 except OSError, inst:
330 except OSError, inst:
330 if inst.errno == errno.EEXIST:
331 if inst.errno == errno.EEXIST:
331 cleandir = None
332 cleandir = None
332 raise util.Abort(_("destination '%s' already exists")
333 raise util.Abort(_("destination '%s' already exists")
333 % dest)
334 % dest)
334 raise
335 raise
335
336
336 destlock = copystore(ui, srcrepo, destpath)
337 destlock = copystore(ui, srcrepo, destpath)
337
338
338 # Recomputing branch cache might be slow on big repos,
339 # Recomputing branch cache might be slow on big repos,
339 # so just copy it
340 # so just copy it
340 dstcachedir = os.path.join(destpath, 'cache')
341 dstcachedir = os.path.join(destpath, 'cache')
341 srcbranchcache = srcrepo.sjoin('cache/branch2')
342 srcbranchcache = srcrepo.sjoin('cache/branch2')
342 dstbranchcache = os.path.join(dstcachedir, 'branch2')
343 dstbranchcache = os.path.join(dstcachedir, 'branch2')
343 if os.path.exists(srcbranchcache):
344 if os.path.exists(srcbranchcache):
344 if not os.path.exists(dstcachedir):
345 if not os.path.exists(dstcachedir):
345 os.mkdir(dstcachedir)
346 os.mkdir(dstcachedir)
346 util.copyfile(srcbranchcache, dstbranchcache)
347 util.copyfile(srcbranchcache, dstbranchcache)
347
348
348 # we need to re-init the repo after manually copying the data
349 # we need to re-init the repo after manually copying the data
349 # into it
350 # into it
350 destpeer = peer(srcrepo, peeropts, dest)
351 destpeer = peer(srcrepo, peeropts, dest)
351 srcrepo.hook('outgoing', source='clone',
352 srcrepo.hook('outgoing', source='clone',
352 node=node.hex(node.nullid))
353 node=node.hex(node.nullid))
353 else:
354 else:
354 try:
355 try:
355 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
356 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
356 # only pass ui when no srcrepo
357 # only pass ui when no srcrepo
357 except OSError, inst:
358 except OSError, inst:
358 if inst.errno == errno.EEXIST:
359 if inst.errno == errno.EEXIST:
359 cleandir = None
360 cleandir = None
360 raise util.Abort(_("destination '%s' already exists")
361 raise util.Abort(_("destination '%s' already exists")
361 % dest)
362 % dest)
362 raise
363 raise
363
364
364 revs = None
365 revs = None
365 if rev:
366 if rev:
366 if not srcpeer.capable('lookup'):
367 if not srcpeer.capable('lookup'):
367 raise util.Abort(_("src repository does not support "
368 raise util.Abort(_("src repository does not support "
368 "revision lookup and so doesn't "
369 "revision lookup and so doesn't "
369 "support clone by revision"))
370 "support clone by revision"))
370 revs = [srcpeer.lookup(r) for r in rev]
371 revs = [srcpeer.lookup(r) for r in rev]
371 checkout = revs[0]
372 checkout = revs[0]
372 if destpeer.local():
373 if destpeer.local():
373 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
374 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
374 elif srcrepo:
375 elif srcrepo:
375 srcrepo.push(destpeer, revs=revs)
376 srcrepo.push(destpeer, revs=revs)
376 else:
377 else:
377 raise util.Abort(_("clone from remote to remote not supported"))
378 raise util.Abort(_("clone from remote to remote not supported"))
378
379
379 cleandir = None
380 cleandir = None
380
381
381 # clone all bookmarks except divergent ones
382 # clone all bookmarks except divergent ones
382 destrepo = destpeer.local()
383 destrepo = destpeer.local()
383 if destrepo and srcpeer.capable("pushkey"):
384 if destrepo and srcpeer.capable("pushkey"):
384 rb = srcpeer.listkeys('bookmarks')
385 rb = srcpeer.listkeys('bookmarks')
385 marks = destrepo._bookmarks
386 marks = destrepo._bookmarks
386 for k, n in rb.iteritems():
387 for k, n in rb.iteritems():
387 try:
388 try:
388 m = destrepo.lookup(n)
389 m = destrepo.lookup(n)
389 marks[k] = m
390 marks[k] = m
390 except error.RepoLookupError:
391 except error.RepoLookupError:
391 pass
392 pass
392 if rb:
393 if rb:
393 marks.write()
394 marks.write()
394 elif srcrepo and destpeer.capable("pushkey"):
395 elif srcrepo and destpeer.capable("pushkey"):
395 for k, n in srcrepo._bookmarks.iteritems():
396 for k, n in srcrepo._bookmarks.iteritems():
396 destpeer.pushkey('bookmarks', k, '', hex(n))
397 destpeer.pushkey('bookmarks', k, '', hex(n))
397
398
398 if destrepo:
399 if destrepo:
399 fp = destrepo.opener("hgrc", "w", text=True)
400 fp = destrepo.opener("hgrc", "w", text=True)
400 fp.write("[paths]\n")
401 fp.write("[paths]\n")
401 u = util.url(abspath)
402 u = util.url(abspath)
402 u.passwd = None
403 u.passwd = None
403 defaulturl = str(u)
404 defaulturl = str(u)
404 fp.write("default = %s\n" % defaulturl)
405 fp.write("default = %s\n" % defaulturl)
405 fp.close()
406 fp.close()
406
407
407 destrepo.ui.setconfig('paths', 'default', defaulturl)
408 destrepo.ui.setconfig('paths', 'default', defaulturl)
408
409
409 if update:
410 if update:
410 if update is not True:
411 if update is not True:
411 checkout = srcpeer.lookup(update)
412 checkout = srcpeer.lookup(update)
412 uprev = None
413 uprev = None
413 status = None
414 status = None
414 if checkout is not None:
415 if checkout is not None:
415 try:
416 try:
416 uprev = destrepo.lookup(checkout)
417 uprev = destrepo.lookup(checkout)
417 except error.RepoLookupError:
418 except error.RepoLookupError:
418 pass
419 pass
419 if uprev is None:
420 if uprev is None:
420 try:
421 try:
421 uprev = destrepo._bookmarks['@']
422 uprev = destrepo._bookmarks['@']
422 update = '@'
423 update = '@'
423 bn = destrepo[uprev].branch()
424 bn = destrepo[uprev].branch()
424 if bn == 'default':
425 if bn == 'default':
425 status = _("updating to bookmark @\n")
426 status = _("updating to bookmark @\n")
426 else:
427 else:
427 status = _("updating to bookmark @ on branch %s\n"
428 status = _("updating to bookmark @ on branch %s\n"
428 % bn)
429 % bn)
429 except KeyError:
430 except KeyError:
430 try:
431 try:
431 uprev = destrepo.branchtip('default')
432 uprev = destrepo.branchtip('default')
432 except error.RepoLookupError:
433 except error.RepoLookupError:
433 uprev = destrepo.lookup('tip')
434 uprev = destrepo.lookup('tip')
434 if not status:
435 if not status:
435 bn = destrepo[uprev].branch()
436 bn = destrepo[uprev].branch()
436 status = _("updating to branch %s\n") % bn
437 status = _("updating to branch %s\n") % bn
437 destrepo.ui.status(status)
438 destrepo.ui.status(status)
438 _update(destrepo, uprev)
439 _update(destrepo, uprev)
439 if update in destrepo._bookmarks:
440 if update in destrepo._bookmarks:
440 bookmarks.setcurrent(destrepo, update)
441 bookmarks.setcurrent(destrepo, update)
441 finally:
442 finally:
442 release(srclock, destlock)
443 release(srclock, destlock)
443 if cleandir is not None:
444 if cleandir is not None:
444 shutil.rmtree(cleandir, True)
445 shutil.rmtree(cleandir, True)
445 if srcpeer is not None:
446 if srcpeer is not None:
446 srcpeer.close()
447 srcpeer.close()
447 return srcpeer, destpeer
448 return srcpeer, destpeer
448
449
def _showstats(repo, stats):
    """Print the standard updated/merged/removed/unresolved summary."""
    msg = _("%d files updated, %d files merged, "
            "%d files removed, %d files unresolved\n")
    repo.ui.status(msg % stats)
452
453
def updaterepo(repo, node, overwrite):
    """Update the working directory to *node*.

    With *overwrite* set, local changes are clobbered; otherwise they
    are merged into the target revision.

    Returns the stats tuple (see pydoc mercurial.merge.applyupdates).
    """
    return mergemod.update(repo, node, False, overwrite, None)
460
461
def update(repo, node):
    """update the working directory to node, merging linear changes"""
    st = updaterepo(repo, node, False)
    _showstats(repo, st)
    unresolved = st[3]
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
    return unresolved > 0
468
469
# clone() uses a local variable named 'update', so keep this alias to
# the update() function for use there (naming conflict in clone())
_update = update
471
472
def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes"""
    st = updaterepo(repo, node, True)
    # an interrupted graft is meaningless after a forced update
    util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
    if show_stats:
        _showstats(repo, st)
    return st[3] > 0
479
480
def merge(repo, node, force=None, remind=True):
    """Branch merge with node, resolving changes. Return true if any
    unresolved conflicts."""
    st = mergemod.update(repo, node, True, force, False)
    _showstats(repo, st)
    unresolved = st[3]
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                         "or 'hg update -C .' to abandon\n"))
    elif remind:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return unresolved > 0
491
492
def _incoming(displaychlist, subreporecurse, ui, repo, source,
              opts, buffered=False):
    """
    Helper for incoming / gincoming.
    displaychlist gets called with
        (remoterepo, incomingchangesetlist, displayer) parameters,
    and is supposed to contain only code that can't be unified.
    """
    source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
    other = peer(repo, opts, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))

    if revs:
        revs = [other.lookup(rev) for rev in revs]
    other, chlist, cleanupfn = bundlerepo.getremotechanges(
        ui, repo, other, revs, opts["bundle"], opts["force"])
    try:
        if not chlist:
            ui.status(_("no changes found\n"))
            return subreporecurse()
        displayer = cmdutil.show_changeset(ui, other, opts, buffered)
        displaychlist(other, chlist, displayer)
        displayer.close()
    finally:
        # always drop the temporary bundle, even on display errors
        cleanupfn()
    subreporecurse()
    return 0 # exit code is zero since we found incoming changes
521
522
522 def incoming(ui, repo, source, opts):
523 def incoming(ui, repo, source, opts):
523 def subreporecurse():
524 def subreporecurse():
524 ret = 1
525 ret = 1
525 if opts.get('subrepos'):
526 if opts.get('subrepos'):
526 ctx = repo[None]
527 ctx = repo[None]
527 for subpath in sorted(ctx.substate):
528 for subpath in sorted(ctx.substate):
528 sub = ctx.sub(subpath)
529 sub = ctx.sub(subpath)
529 ret = min(ret, sub.incoming(ui, source, opts))
530 ret = min(ret, sub.incoming(ui, source, opts))
530 return ret
531 return ret
531
532
532 def display(other, chlist, displayer):
533 def display(other, chlist, displayer):
533 limit = cmdutil.loglimit(opts)
534 limit = cmdutil.loglimit(opts)
534 if opts.get('newest_first'):
535 if opts.get('newest_first'):
535 chlist.reverse()
536 chlist.reverse()
536 count = 0
537 count = 0
537 for n in chlist:
538 for n in chlist:
538 if limit is not None and count >= limit:
539 if limit is not None and count >= limit:
539 break
540 break
540 parents = [p for p in other.changelog.parents(n) if p != nullid]
541 parents = [p for p in other.changelog.parents(n) if p != nullid]
541 if opts.get('no_merges') and len(parents) == 2:
542 if opts.get('no_merges') and len(parents) == 2:
542 continue
543 continue
543 count += 1
544 count += 1
544 displayer.show(other[n])
545 displayer.show(other[n])
545 return _incoming(display, subreporecurse, ui, repo, source, opts)
546 return _incoming(display, subreporecurse, ui, repo, source, opts)
546
547
547 def _outgoing(ui, repo, dest, opts):
548 def _outgoing(ui, repo, dest, opts):
548 dest = ui.expandpath(dest or 'default-push', dest or 'default')
549 dest = ui.expandpath(dest or 'default-push', dest or 'default')
549 dest, branches = parseurl(dest, opts.get('branch'))
550 dest, branches = parseurl(dest, opts.get('branch'))
550 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
551 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
551 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
552 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
552 if revs:
553 if revs:
553 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
554 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
554
555
555 other = peer(repo, opts, dest)
556 other = peer(repo, opts, dest)
556 outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
557 outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
557 force=opts.get('force'))
558 force=opts.get('force'))
558 o = outgoing.missing
559 o = outgoing.missing
559 if not o:
560 if not o:
560 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
561 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
561 return None
562 return None
562 return o
563 return o
563
564
564 def outgoing(ui, repo, dest, opts):
565 def outgoing(ui, repo, dest, opts):
565 def recurse():
566 def recurse():
566 ret = 1
567 ret = 1
567 if opts.get('subrepos'):
568 if opts.get('subrepos'):
568 ctx = repo[None]
569 ctx = repo[None]
569 for subpath in sorted(ctx.substate):
570 for subpath in sorted(ctx.substate):
570 sub = ctx.sub(subpath)
571 sub = ctx.sub(subpath)
571 ret = min(ret, sub.outgoing(ui, dest, opts))
572 ret = min(ret, sub.outgoing(ui, dest, opts))
572 return ret
573 return ret
573
574
574 limit = cmdutil.loglimit(opts)
575 limit = cmdutil.loglimit(opts)
575 o = _outgoing(ui, repo, dest, opts)
576 o = _outgoing(ui, repo, dest, opts)
576 if o is None:
577 if o is None:
577 return recurse()
578 return recurse()
578
579
579 if opts.get('newest_first'):
580 if opts.get('newest_first'):
580 o.reverse()
581 o.reverse()
581 displayer = cmdutil.show_changeset(ui, repo, opts)
582 displayer = cmdutil.show_changeset(ui, repo, opts)
582 count = 0
583 count = 0
583 for n in o:
584 for n in o:
584 if limit is not None and count >= limit:
585 if limit is not None and count >= limit:
585 break
586 break
586 parents = [p for p in repo.changelog.parents(n) if p != nullid]
587 parents = [p for p in repo.changelog.parents(n) if p != nullid]
587 if opts.get('no_merges') and len(parents) == 2:
588 if opts.get('no_merges') and len(parents) == 2:
588 continue
589 continue
589 count += 1
590 count += 1
590 displayer.show(repo[n])
591 displayer.show(repo[n])
591 displayer.close()
592 displayer.close()
592 recurse()
593 recurse()
593 return 0 # exit code is zero since we found outgoing changes
594 return 0 # exit code is zero since we found outgoing changes
594
595
595 def revert(repo, node, choose):
596 def revert(repo, node, choose):
596 """revert changes to revision in node without updating dirstate"""
597 """revert changes to revision in node without updating dirstate"""
597 return mergemod.update(repo, node, False, True, choose)[3] > 0
598 return mergemod.update(repo, node, False, True, choose)[3] > 0
598
599
599 def verify(repo):
600 def verify(repo):
600 """verify the consistency of a repository"""
601 """verify the consistency of a repository"""
601 return verifymod.verify(repo)
602 return verifymod.verify(repo)
602
603
603 def remoteui(src, opts):
604 def remoteui(src, opts):
604 'build a remote ui from ui or repo and opts'
605 'build a remote ui from ui or repo and opts'
605 if util.safehasattr(src, 'baseui'): # looks like a repository
606 if util.safehasattr(src, 'baseui'): # looks like a repository
606 dst = src.baseui.copy() # drop repo-specific config
607 dst = src.baseui.copy() # drop repo-specific config
607 src = src.ui # copy target options from repo
608 src = src.ui # copy target options from repo
608 else: # assume it's a global ui object
609 else: # assume it's a global ui object
609 dst = src.copy() # keep all global options
610 dst = src.copy() # keep all global options
610
611
611 # copy ssh-specific options
612 # copy ssh-specific options
612 for o in 'ssh', 'remotecmd':
613 for o in 'ssh', 'remotecmd':
613 v = opts.get(o) or src.config('ui', o)
614 v = opts.get(o) or src.config('ui', o)
614 if v:
615 if v:
615 dst.setconfig("ui", o, v)
616 dst.setconfig("ui", o, v)
616
617
617 # copy bundle-specific options
618 # copy bundle-specific options
618 r = src.config('bundle', 'mainreporoot')
619 r = src.config('bundle', 'mainreporoot')
619 if r:
620 if r:
620 dst.setconfig('bundle', 'mainreporoot', r)
621 dst.setconfig('bundle', 'mainreporoot', r)
621
622
622 # copy selected local settings to the remote ui
623 # copy selected local settings to the remote ui
623 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
624 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
624 for key, val in src.configitems(sect):
625 for key, val in src.configitems(sect):
625 dst.setconfig(sect, key, val)
626 dst.setconfig(sect, key, val)
626 v = src.config('web', 'cacerts')
627 v = src.config('web', 'cacerts')
627 if v:
628 if v:
628 dst.setconfig('web', 'cacerts', util.expandpath(v))
629 dst.setconfig('web', 'cacerts', util.expandpath(v))
629
630
630 return dst
631 return dst
@@ -1,2002 +1,2007 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, time, datetime, calendar, textwrap, signal, collections
19 import os, time, datetime, calendar, textwrap, signal, collections
20 import imp, socket, urllib
20 import imp, socket, urllib
21
21
22 if os.name == 'nt':
22 if os.name == 'nt':
23 import windows as platform
23 import windows as platform
24 else:
24 else:
25 import posix as platform
25 import posix as platform
26
26
27 cachestat = platform.cachestat
27 cachestat = platform.cachestat
28 checkexec = platform.checkexec
28 checkexec = platform.checkexec
29 checklink = platform.checklink
29 checklink = platform.checklink
30 copymode = platform.copymode
30 copymode = platform.copymode
31 executablepath = platform.executablepath
31 executablepath = platform.executablepath
32 expandglobs = platform.expandglobs
32 expandglobs = platform.expandglobs
33 explainexit = platform.explainexit
33 explainexit = platform.explainexit
34 findexe = platform.findexe
34 findexe = platform.findexe
35 gethgcmd = platform.gethgcmd
35 gethgcmd = platform.gethgcmd
36 getuser = platform.getuser
36 getuser = platform.getuser
37 groupmembers = platform.groupmembers
37 groupmembers = platform.groupmembers
38 groupname = platform.groupname
38 groupname = platform.groupname
39 hidewindow = platform.hidewindow
39 hidewindow = platform.hidewindow
40 isexec = platform.isexec
40 isexec = platform.isexec
41 isowner = platform.isowner
41 isowner = platform.isowner
42 localpath = platform.localpath
42 localpath = platform.localpath
43 lookupreg = platform.lookupreg
43 lookupreg = platform.lookupreg
44 makedir = platform.makedir
44 makedir = platform.makedir
45 nlinks = platform.nlinks
45 nlinks = platform.nlinks
46 normpath = platform.normpath
46 normpath = platform.normpath
47 normcase = platform.normcase
47 normcase = platform.normcase
48 openhardlinks = platform.openhardlinks
48 openhardlinks = platform.openhardlinks
49 oslink = platform.oslink
49 oslink = platform.oslink
50 parsepatchoutput = platform.parsepatchoutput
50 parsepatchoutput = platform.parsepatchoutput
51 pconvert = platform.pconvert
51 pconvert = platform.pconvert
52 popen = platform.popen
52 popen = platform.popen
53 posixfile = platform.posixfile
53 posixfile = platform.posixfile
54 quotecommand = platform.quotecommand
54 quotecommand = platform.quotecommand
55 rename = platform.rename
55 rename = platform.rename
56 samedevice = platform.samedevice
56 samedevice = platform.samedevice
57 samefile = platform.samefile
57 samefile = platform.samefile
58 samestat = platform.samestat
58 samestat = platform.samestat
59 setbinary = platform.setbinary
59 setbinary = platform.setbinary
60 setflags = platform.setflags
60 setflags = platform.setflags
61 setsignalhandler = platform.setsignalhandler
61 setsignalhandler = platform.setsignalhandler
62 shellquote = platform.shellquote
62 shellquote = platform.shellquote
63 spawndetached = platform.spawndetached
63 spawndetached = platform.spawndetached
64 split = platform.split
64 split = platform.split
65 sshargs = platform.sshargs
65 sshargs = platform.sshargs
66 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
66 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
67 statisexec = platform.statisexec
67 statisexec = platform.statisexec
68 statislink = platform.statislink
68 statislink = platform.statislink
69 termwidth = platform.termwidth
69 termwidth = platform.termwidth
70 testpid = platform.testpid
70 testpid = platform.testpid
71 umask = platform.umask
71 umask = platform.umask
72 unlink = platform.unlink
72 unlink = platform.unlink
73 unlinkpath = platform.unlinkpath
73 unlinkpath = platform.unlinkpath
74 username = platform.username
74 username = platform.username
75
75
76 # Python compatibility
76 # Python compatibility
77
77
78 _notset = object()
78 _notset = object()
79
79
80 def safehasattr(thing, attr):
80 def safehasattr(thing, attr):
81 return getattr(thing, attr, _notset) is not _notset
81 return getattr(thing, attr, _notset) is not _notset
82
82
83 def sha1(s=''):
83 def sha1(s=''):
84 '''
84 '''
85 Low-overhead wrapper around Python's SHA support
85 Low-overhead wrapper around Python's SHA support
86
86
87 >>> f = _fastsha1
87 >>> f = _fastsha1
88 >>> a = sha1()
88 >>> a = sha1()
89 >>> a = f()
89 >>> a = f()
90 >>> a.hexdigest()
90 >>> a.hexdigest()
91 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
91 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
92 '''
92 '''
93
93
94 return _fastsha1(s)
94 return _fastsha1(s)
95
95
96 def _fastsha1(s=''):
96 def _fastsha1(s=''):
97 # This function will import sha1 from hashlib or sha (whichever is
97 # This function will import sha1 from hashlib or sha (whichever is
98 # available) and overwrite itself with it on the first call.
98 # available) and overwrite itself with it on the first call.
99 # Subsequent calls will go directly to the imported function.
99 # Subsequent calls will go directly to the imported function.
100 if sys.version_info >= (2, 5):
100 if sys.version_info >= (2, 5):
101 from hashlib import sha1 as _sha1
101 from hashlib import sha1 as _sha1
102 else:
102 else:
103 from sha import sha as _sha1
103 from sha import sha as _sha1
104 global _fastsha1, sha1
104 global _fastsha1, sha1
105 _fastsha1 = sha1 = _sha1
105 _fastsha1 = sha1 = _sha1
106 return _sha1(s)
106 return _sha1(s)
107
107
108 try:
108 try:
109 buffer = buffer
109 buffer = buffer
110 except NameError:
110 except NameError:
111 if sys.version_info[0] < 3:
111 if sys.version_info[0] < 3:
112 def buffer(sliceable, offset=0):
112 def buffer(sliceable, offset=0):
113 return sliceable[offset:]
113 return sliceable[offset:]
114 else:
114 else:
115 def buffer(sliceable, offset=0):
115 def buffer(sliceable, offset=0):
116 return memoryview(sliceable)[offset:]
116 return memoryview(sliceable)[offset:]
117
117
118 import subprocess
118 import subprocess
119 closefds = os.name == 'posix'
119 closefds = os.name == 'posix'
120
120
121 def popen2(cmd, env=None, newlines=False):
121 def popen2(cmd, env=None, newlines=False):
122 # Setting bufsize to -1 lets the system decide the buffer size.
122 # Setting bufsize to -1 lets the system decide the buffer size.
123 # The default for bufsize is 0, meaning unbuffered. This leads to
123 # The default for bufsize is 0, meaning unbuffered. This leads to
124 # poor performance on Mac OS X: http://bugs.python.org/issue4194
124 # poor performance on Mac OS X: http://bugs.python.org/issue4194
125 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
125 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
126 close_fds=closefds,
126 close_fds=closefds,
127 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
127 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
128 universal_newlines=newlines,
128 universal_newlines=newlines,
129 env=env)
129 env=env)
130 return p.stdin, p.stdout
130 return p.stdin, p.stdout
131
131
132 def popen3(cmd, env=None, newlines=False):
132 def popen3(cmd, env=None, newlines=False):
133 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
133 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
134 return stdin, stdout, stderr
134 return stdin, stdout, stderr
135
135
136 def popen4(cmd, env=None, newlines=False):
136 def popen4(cmd, env=None, newlines=False):
137 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
137 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
138 close_fds=closefds,
138 close_fds=closefds,
139 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
139 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
140 stderr=subprocess.PIPE,
140 stderr=subprocess.PIPE,
141 universal_newlines=newlines,
141 universal_newlines=newlines,
142 env=env)
142 env=env)
143 return p.stdin, p.stdout, p.stderr, p
143 return p.stdin, p.stdout, p.stderr, p
144
144
145 def version():
145 def version():
146 """Return version information if available."""
146 """Return version information if available."""
147 try:
147 try:
148 import __version__
148 import __version__
149 return __version__.version
149 return __version__.version
150 except ImportError:
150 except ImportError:
151 return 'unknown'
151 return 'unknown'
152
152
153 # used by parsedate
153 # used by parsedate
154 defaultdateformats = (
154 defaultdateformats = (
155 '%Y-%m-%d %H:%M:%S',
155 '%Y-%m-%d %H:%M:%S',
156 '%Y-%m-%d %I:%M:%S%p',
156 '%Y-%m-%d %I:%M:%S%p',
157 '%Y-%m-%d %H:%M',
157 '%Y-%m-%d %H:%M',
158 '%Y-%m-%d %I:%M%p',
158 '%Y-%m-%d %I:%M%p',
159 '%Y-%m-%d',
159 '%Y-%m-%d',
160 '%m-%d',
160 '%m-%d',
161 '%m/%d',
161 '%m/%d',
162 '%m/%d/%y',
162 '%m/%d/%y',
163 '%m/%d/%Y',
163 '%m/%d/%Y',
164 '%a %b %d %H:%M:%S %Y',
164 '%a %b %d %H:%M:%S %Y',
165 '%a %b %d %I:%M:%S%p %Y',
165 '%a %b %d %I:%M:%S%p %Y',
166 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
166 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
167 '%b %d %H:%M:%S %Y',
167 '%b %d %H:%M:%S %Y',
168 '%b %d %I:%M:%S%p %Y',
168 '%b %d %I:%M:%S%p %Y',
169 '%b %d %H:%M:%S',
169 '%b %d %H:%M:%S',
170 '%b %d %I:%M:%S%p',
170 '%b %d %I:%M:%S%p',
171 '%b %d %H:%M',
171 '%b %d %H:%M',
172 '%b %d %I:%M%p',
172 '%b %d %I:%M%p',
173 '%b %d %Y',
173 '%b %d %Y',
174 '%b %d',
174 '%b %d',
175 '%H:%M:%S',
175 '%H:%M:%S',
176 '%I:%M:%S%p',
176 '%I:%M:%S%p',
177 '%H:%M',
177 '%H:%M',
178 '%I:%M%p',
178 '%I:%M%p',
179 )
179 )
180
180
181 extendeddateformats = defaultdateformats + (
181 extendeddateformats = defaultdateformats + (
182 "%Y",
182 "%Y",
183 "%Y-%m",
183 "%Y-%m",
184 "%b",
184 "%b",
185 "%b %Y",
185 "%b %Y",
186 )
186 )
187
187
188 def cachefunc(func):
188 def cachefunc(func):
189 '''cache the result of function calls'''
189 '''cache the result of function calls'''
190 # XXX doesn't handle keywords args
190 # XXX doesn't handle keywords args
191 cache = {}
191 cache = {}
192 if func.func_code.co_argcount == 1:
192 if func.func_code.co_argcount == 1:
193 # we gain a small amount of time because
193 # we gain a small amount of time because
194 # we don't need to pack/unpack the list
194 # we don't need to pack/unpack the list
195 def f(arg):
195 def f(arg):
196 if arg not in cache:
196 if arg not in cache:
197 cache[arg] = func(arg)
197 cache[arg] = func(arg)
198 return cache[arg]
198 return cache[arg]
199 else:
199 else:
200 def f(*args):
200 def f(*args):
201 if args not in cache:
201 if args not in cache:
202 cache[args] = func(*args)
202 cache[args] = func(*args)
203 return cache[args]
203 return cache[args]
204
204
205 return f
205 return f
206
206
207 try:
207 try:
208 collections.deque.remove
208 collections.deque.remove
209 deque = collections.deque
209 deque = collections.deque
210 except AttributeError:
210 except AttributeError:
211 # python 2.4 lacks deque.remove
211 # python 2.4 lacks deque.remove
212 class deque(collections.deque):
212 class deque(collections.deque):
213 def remove(self, val):
213 def remove(self, val):
214 for i, v in enumerate(self):
214 for i, v in enumerate(self):
215 if v == val:
215 if v == val:
216 del self[i]
216 del self[i]
217 break
217 break
218
218
219 class lrucachedict(object):
219 class lrucachedict(object):
220 '''cache most recent gets from or sets to this dictionary'''
220 '''cache most recent gets from or sets to this dictionary'''
221 def __init__(self, maxsize):
221 def __init__(self, maxsize):
222 self._cache = {}
222 self._cache = {}
223 self._maxsize = maxsize
223 self._maxsize = maxsize
224 self._order = deque()
224 self._order = deque()
225
225
226 def __getitem__(self, key):
226 def __getitem__(self, key):
227 value = self._cache[key]
227 value = self._cache[key]
228 self._order.remove(key)
228 self._order.remove(key)
229 self._order.append(key)
229 self._order.append(key)
230 return value
230 return value
231
231
232 def __setitem__(self, key, value):
232 def __setitem__(self, key, value):
233 if key not in self._cache:
233 if key not in self._cache:
234 if len(self._cache) >= self._maxsize:
234 if len(self._cache) >= self._maxsize:
235 del self._cache[self._order.popleft()]
235 del self._cache[self._order.popleft()]
236 else:
236 else:
237 self._order.remove(key)
237 self._order.remove(key)
238 self._cache[key] = value
238 self._cache[key] = value
239 self._order.append(key)
239 self._order.append(key)
240
240
241 def __contains__(self, key):
241 def __contains__(self, key):
242 return key in self._cache
242 return key in self._cache
243
243
244 def clear(self):
244 def clear(self):
245 self._cache.clear()
245 self._cache.clear()
246 self._order = deque()
246 self._order = deque()
247
247
248 def lrucachefunc(func):
248 def lrucachefunc(func):
249 '''cache most recent results of function calls'''
249 '''cache most recent results of function calls'''
250 cache = {}
250 cache = {}
251 order = deque()
251 order = deque()
252 if func.func_code.co_argcount == 1:
252 if func.func_code.co_argcount == 1:
253 def f(arg):
253 def f(arg):
254 if arg not in cache:
254 if arg not in cache:
255 if len(cache) > 20:
255 if len(cache) > 20:
256 del cache[order.popleft()]
256 del cache[order.popleft()]
257 cache[arg] = func(arg)
257 cache[arg] = func(arg)
258 else:
258 else:
259 order.remove(arg)
259 order.remove(arg)
260 order.append(arg)
260 order.append(arg)
261 return cache[arg]
261 return cache[arg]
262 else:
262 else:
263 def f(*args):
263 def f(*args):
264 if args not in cache:
264 if args not in cache:
265 if len(cache) > 20:
265 if len(cache) > 20:
266 del cache[order.popleft()]
266 del cache[order.popleft()]
267 cache[args] = func(*args)
267 cache[args] = func(*args)
268 else:
268 else:
269 order.remove(args)
269 order.remove(args)
270 order.append(args)
270 order.append(args)
271 return cache[args]
271 return cache[args]
272
272
273 return f
273 return f
274
274
275 class propertycache(object):
275 class propertycache(object):
276 def __init__(self, func):
276 def __init__(self, func):
277 self.func = func
277 self.func = func
278 self.name = func.__name__
278 self.name = func.__name__
279 def __get__(self, obj, type=None):
279 def __get__(self, obj, type=None):
280 result = self.func(obj)
280 result = self.func(obj)
281 self.cachevalue(obj, result)
281 self.cachevalue(obj, result)
282 return result
282 return result
283
283
284 def cachevalue(self, obj, value):
284 def cachevalue(self, obj, value):
285 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
285 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
286 obj.__dict__[self.name] = value
286 obj.__dict__[self.name] = value
287
287
288 def pipefilter(s, cmd):
288 def pipefilter(s, cmd):
289 '''filter string S through command CMD, returning its output'''
289 '''filter string S through command CMD, returning its output'''
290 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
290 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
291 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
291 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
292 pout, perr = p.communicate(s)
292 pout, perr = p.communicate(s)
293 return pout
293 return pout
294
294
295 def tempfilter(s, cmd):
295 def tempfilter(s, cmd):
296 '''filter string S through a pair of temporary files with CMD.
296 '''filter string S through a pair of temporary files with CMD.
297 CMD is used as a template to create the real command to be run,
297 CMD is used as a template to create the real command to be run,
298 with the strings INFILE and OUTFILE replaced by the real names of
298 with the strings INFILE and OUTFILE replaced by the real names of
299 the temporary files generated.'''
299 the temporary files generated.'''
300 inname, outname = None, None
300 inname, outname = None, None
301 try:
301 try:
302 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
302 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
303 fp = os.fdopen(infd, 'wb')
303 fp = os.fdopen(infd, 'wb')
304 fp.write(s)
304 fp.write(s)
305 fp.close()
305 fp.close()
306 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
306 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
307 os.close(outfd)
307 os.close(outfd)
308 cmd = cmd.replace('INFILE', inname)
308 cmd = cmd.replace('INFILE', inname)
309 cmd = cmd.replace('OUTFILE', outname)
309 cmd = cmd.replace('OUTFILE', outname)
310 code = os.system(cmd)
310 code = os.system(cmd)
311 if sys.platform == 'OpenVMS' and code & 1:
311 if sys.platform == 'OpenVMS' and code & 1:
312 code = 0
312 code = 0
313 if code:
313 if code:
314 raise Abort(_("command '%s' failed: %s") %
314 raise Abort(_("command '%s' failed: %s") %
315 (cmd, explainexit(code)))
315 (cmd, explainexit(code)))
316 fp = open(outname, 'rb')
316 fp = open(outname, 'rb')
317 r = fp.read()
317 r = fp.read()
318 fp.close()
318 fp.close()
319 return r
319 return r
320 finally:
320 finally:
321 try:
321 try:
322 if inname:
322 if inname:
323 os.unlink(inname)
323 os.unlink(inname)
324 except OSError:
324 except OSError:
325 pass
325 pass
326 try:
326 try:
327 if outname:
327 if outname:
328 os.unlink(outname)
328 os.unlink(outname)
329 except OSError:
329 except OSError:
330 pass
330 pass
331
331
332 filtertable = {
332 filtertable = {
333 'tempfile:': tempfilter,
333 'tempfile:': tempfilter,
334 'pipe:': pipefilter,
334 'pipe:': pipefilter,
335 }
335 }
336
336
337 def filter(s, cmd):
337 def filter(s, cmd):
338 "filter a string through a command that transforms its input to its output"
338 "filter a string through a command that transforms its input to its output"
339 for name, fn in filtertable.iteritems():
339 for name, fn in filtertable.iteritems():
340 if cmd.startswith(name):
340 if cmd.startswith(name):
341 return fn(s, cmd[len(name):].lstrip())
341 return fn(s, cmd[len(name):].lstrip())
342 return pipefilter(s, cmd)
342 return pipefilter(s, cmd)
343
343
344 def binary(s):
344 def binary(s):
345 """return true if a string is binary data"""
345 """return true if a string is binary data"""
346 return bool(s and '\0' in s)
346 return bool(s and '\0' in s)
347
347
348 def increasingchunks(source, min=1024, max=65536):
348 def increasingchunks(source, min=1024, max=65536):
349 '''return no less than min bytes per chunk while data remains,
349 '''return no less than min bytes per chunk while data remains,
350 doubling min after each chunk until it reaches max'''
350 doubling min after each chunk until it reaches max'''
351 def log2(x):
351 def log2(x):
352 if not x:
352 if not x:
353 return 0
353 return 0
354 i = 0
354 i = 0
355 while x:
355 while x:
356 x >>= 1
356 x >>= 1
357 i += 1
357 i += 1
358 return i - 1
358 return i - 1
359
359
360 buf = []
360 buf = []
361 blen = 0
361 blen = 0
362 for chunk in source:
362 for chunk in source:
363 buf.append(chunk)
363 buf.append(chunk)
364 blen += len(chunk)
364 blen += len(chunk)
365 if blen >= min:
365 if blen >= min:
366 if min < max:
366 if min < max:
367 min = min << 1
367 min = min << 1
368 nmin = 1 << log2(blen)
368 nmin = 1 << log2(blen)
369 if nmin > min:
369 if nmin > min:
370 min = nmin
370 min = nmin
371 if min > max:
371 if min > max:
372 min = max
372 min = max
373 yield ''.join(buf)
373 yield ''.join(buf)
374 blen = 0
374 blen = 0
375 buf = []
375 buf = []
376 if buf:
376 if buf:
377 yield ''.join(buf)
377 yield ''.join(buf)
378
378
379 Abort = error.Abort
379 Abort = error.Abort
380
380
381 def always(fn):
381 def always(fn):
382 return True
382 return True
383
383
384 def never(fn):
384 def never(fn):
385 return False
385 return False
386
386
387 def pathto(root, n1, n2):
387 def pathto(root, n1, n2):
388 '''return the relative path from one place to another.
388 '''return the relative path from one place to another.
389 root should use os.sep to separate directories
389 root should use os.sep to separate directories
390 n1 should use os.sep to separate directories
390 n1 should use os.sep to separate directories
391 n2 should use "/" to separate directories
391 n2 should use "/" to separate directories
392 returns an os.sep-separated path.
392 returns an os.sep-separated path.
393
393
394 If n1 is a relative path, it's assumed it's
394 If n1 is a relative path, it's assumed it's
395 relative to root.
395 relative to root.
396 n2 should always be relative to root.
396 n2 should always be relative to root.
397 '''
397 '''
398 if not n1:
398 if not n1:
399 return localpath(n2)
399 return localpath(n2)
400 if os.path.isabs(n1):
400 if os.path.isabs(n1):
401 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
401 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
402 return os.path.join(root, localpath(n2))
402 return os.path.join(root, localpath(n2))
403 n2 = '/'.join((pconvert(root), n2))
403 n2 = '/'.join((pconvert(root), n2))
404 a, b = splitpath(n1), n2.split('/')
404 a, b = splitpath(n1), n2.split('/')
405 a.reverse()
405 a.reverse()
406 b.reverse()
406 b.reverse()
407 while a and b and a[-1] == b[-1]:
407 while a and b and a[-1] == b[-1]:
408 a.pop()
408 a.pop()
409 b.pop()
409 b.pop()
410 b.reverse()
410 b.reverse()
411 return os.sep.join((['..'] * len(a)) + b) or '.'
411 return os.sep.join((['..'] * len(a)) + b) or '.'
412
412
413 _hgexecutable = None
413 _hgexecutable = None
414
414
415 def mainfrozen():
415 def mainfrozen():
416 """return True if we are a frozen executable.
416 """return True if we are a frozen executable.
417
417
418 The code supports py2exe (most common, Windows only) and tools/freeze
418 The code supports py2exe (most common, Windows only) and tools/freeze
419 (portable, not much used).
419 (portable, not much used).
420 """
420 """
421 return (safehasattr(sys, "frozen") or # new py2exe
421 return (safehasattr(sys, "frozen") or # new py2exe
422 safehasattr(sys, "importers") or # old py2exe
422 safehasattr(sys, "importers") or # old py2exe
423 imp.is_frozen("__main__")) # tools/freeze
423 imp.is_frozen("__main__")) # tools/freeze
424
424
425 def hgexecutable():
425 def hgexecutable():
426 """return location of the 'hg' executable.
426 """return location of the 'hg' executable.
427
427
428 Defaults to $HG or 'hg' in the search path.
428 Defaults to $HG or 'hg' in the search path.
429 """
429 """
430 if _hgexecutable is None:
430 if _hgexecutable is None:
431 hg = os.environ.get('HG')
431 hg = os.environ.get('HG')
432 mainmod = sys.modules['__main__']
432 mainmod = sys.modules['__main__']
433 if hg:
433 if hg:
434 _sethgexecutable(hg)
434 _sethgexecutable(hg)
435 elif mainfrozen():
435 elif mainfrozen():
436 _sethgexecutable(sys.executable)
436 _sethgexecutable(sys.executable)
437 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
437 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
438 _sethgexecutable(mainmod.__file__)
438 _sethgexecutable(mainmod.__file__)
439 else:
439 else:
440 exe = findexe('hg') or os.path.basename(sys.argv[0])
440 exe = findexe('hg') or os.path.basename(sys.argv[0])
441 _sethgexecutable(exe)
441 _sethgexecutable(exe)
442 return _hgexecutable
442 return _hgexecutable
443
443
444 def _sethgexecutable(path):
444 def _sethgexecutable(path):
445 """set location of the 'hg' executable"""
445 """set location of the 'hg' executable"""
446 global _hgexecutable
446 global _hgexecutable
447 _hgexecutable = path
447 _hgexecutable = path
448
448
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # note: the mutable default environ={} is only iterated, never
    # mutated, so sharing it across calls is safe here
    try:
        sys.stdout.flush()
    except Exception:
        pass
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9' and (sys.version_info[0] == 2
                                    and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if cwd is not None:  # was "not cwd is None" (PEP8 E714)
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        env['HG'] = hgexecutable()
        if out is None or out == sys.__stdout__:
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # caller wants output captured: funnel stdout+stderr
            # through out.write() line by line
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            for line in proc.stdout:
                out.write(line)
            proc.wait()
            rc = proc.returncode
        if sys.platform == 'OpenVMS' and rc & 1:
            # on OpenVMS an odd status means success
            rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        try:
            # onerr may be a ui object (warn + return status) ...
            onerr.warn(errmsg + '\n')
        except AttributeError:
            # ... or an exception class to raise
            raise onerr(errmsg)
    return rc
506
506
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a one-frame traceback means the TypeError came from the
            # call itself (bad arity), not from inside func
            tb = sys.exc_info()[2]
            if len(traceback.extract_tb(tb)) != 1:
                raise
            raise error.SignatureError

    return check
518
518
519 def copyfile(src, dest):
519 def copyfile(src, dest):
520 "copy a file, preserving mode and atime/mtime"
520 "copy a file, preserving mode and atime/mtime"
521 if os.path.lexists(dest):
521 if os.path.lexists(dest):
522 unlink(dest)
522 unlink(dest)
523 if os.path.islink(src):
523 if os.path.islink(src):
524 os.symlink(os.readlink(src), dest)
524 os.symlink(os.readlink(src), dest)
525 else:
525 else:
526 try:
526 try:
527 shutil.copyfile(src, dest)
527 shutil.copyfile(src, dest)
528 shutil.copymode(src, dest)
528 shutil.copymode(src, dest)
529 except shutil.Error, inst:
529 except shutil.Error, inst:
530 raise Abort(str(inst))
530 raise Abort(str(inst))
531
531
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # default: hardlink only when source and destination share a device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    num = 0
    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            # recurse; a hardlink failure below disables linking for the
            # rest of the tree, so thread the flag through each call
            hardlink, copied = copyfiles(os.path.join(src, name),
                                         os.path.join(dst, name),
                                         hardlink)
            num += copied
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1

    return hardlink, num
559
559
# Windows device names that are reserved in any directory, with or
# without an extension (e.g. both 'con' and 'con.xml' are invalid).
_winreservednames = '''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
# characters that may not appear anywhere in a Windows filename
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # examine each path component; both separators delimit components
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue  # empty component from a duplicate/leading separator
        for c in n:
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                # control characters are never valid on Windows
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        # device names are reserved regardless of extension, so test
        # only the part before the first dot
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        # a trailing dot or space is not allowed; note that
        # "n not in '..'" is a *substring* test, deliberately letting
        # the special names '.' and '..' through
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
610
610
# Select the OS-level filename validator for this platform: on Windows
# it is exactly the checkwinfilename rules above; elsewhere the platform
# module supplies its own implementation.
# NOTE(review): presumably a no-op on POSIX -- confirm in the platform
# module before relying on it.
if os.name == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename
615
615
616 def makelock(info, pathname):
616 def makelock(info, pathname):
617 try:
617 try:
618 return os.symlink(info, pathname)
618 return os.symlink(info, pathname)
619 except OSError, why:
619 except OSError, why:
620 if why.errno == errno.EEXIST:
620 if why.errno == errno.EEXIST:
621 raise
621 raise
622 except AttributeError: # no symlink in os
622 except AttributeError: # no symlink in os
623 pass
623 pass
624
624
625 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
625 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
626 os.write(ld, info)
626 os.write(ld, info)
627 os.close(ld)
627 os.close(ld)
628
628
629 def readlock(pathname):
629 def readlock(pathname):
630 try:
630 try:
631 return os.readlink(pathname)
631 return os.readlink(pathname)
632 except OSError, why:
632 except OSError, why:
633 if why.errno not in (errno.EINVAL, errno.ENOSYS):
633 if why.errno not in (errno.EINVAL, errno.ENOSYS):
634 raise
634 raise
635 except AttributeError: # no symlink in os
635 except AttributeError: # no symlink in os
636 pass
636 pass
637 fp = posixfile(pathname)
637 fp = posixfile(pathname)
638 r = fp.read()
638 r = fp.read()
639 fp.close()
639 fp.close()
640 return r
640 return r
641
641
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no fileno(): stat the object by its name instead
        return os.stat(fp.name)
    return os.fstat(fd)
648
648
649 # File system features
649 # File system features
650
650
def checkcase(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st1 = os.stat(path)
    dirpath, base = os.path.split(path)
    # fold the final component the other way and see whether the
    # filesystem treats it as the same entry
    folded = base.upper()
    if folded == base:
        folded = base.lower()
    if folded == base:
        return True # no evidence against case sensitivity
    try:
        st2 = os.stat(os.path.join(dirpath, folded))
    except OSError:
        # folded variant doesn't exist: filesystem distinguishes case
        return True
    return st2 != st1
673
673
# _re2 is a tri-state flag for google-re2 availability:
#   None  -> re2 imported but not yet verified to work
#             (verification happens lazily in compilere below)
#   False -> re2 missing or unusable
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False
679
679
def compilere(pat, flags=0):
    '''Compile a regular expression, using re2 if possible

    For best performance, use only re2-compatible regexp features. The
    only flags from the re module that are re2-compatible are
    IGNORECASE and MULTILINE.'''
    global _re2
    if _re2 is None:
        # first call: probe that re2 actually works
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            # NOTE(review): ImportError at first *use* presumably comes
            # from lazy/demand importing of re2 -- confirm
            _re2 = False
    if _re2 and (flags & ~(re.IGNORECASE | re.MULTILINE)) == 0:
        # re2 takes flags inline in the pattern, not as an argument
        if flags & re.IGNORECASE:
            pat = '(?i)' + pat
        if flags & re.MULTILINE:
            pat = '(?m)' + pat
        try:
            return re2.compile(pat)
        except re2.error:
            # pattern uses a feature re2 lacks: fall back to re
            pass
    return re.compile(pat, flags)
703
703
# cache of directory -> listdir() results used by fspath()
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def find(p, contents):
        # return the real directory entry whose normcased name matches p
        for n in contents:
            if normcase(n) == p:
                return n
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # (str.replace returns a NEW string; the old code discarded the
    # result, leaving the backslash unescaped inside the character
    # classes.  '[\/]' happens to parse identically to '[\\/]', but make
    # the escaping actually take effect.)
    seps = seps.replace('\\', '\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separator runs are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        found = find(part, contents)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            contents = os.listdir(dir)
            _fspathcache[dir] = contents
            found = find(part, contents)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
750
750
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        # leftover probe file: give up rather than clobber it
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        try:
            oslink(f1, f2)
        except OSError:
            # hardlinks are unsupported here
            return False

        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        # a working implementation must report 2 links for f1/f2
        return nlinks(f2) > 1
    finally:
        if fd is not None:
            fd.close()
        # best-effort removal of both probe files
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass
784
784
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    # note: when the primary separator doesn't match and os.altsep is
    # unset, this returns None -- callers rely only on truthiness
    if path.endswith(os.sep):
        return True
    return os.altsep and path.endswith(os.altsep)
788
788
def splitpath(path):
    '''Split path by os.sep.

    Note that this function does not use os.altsep because it is meant
    as a simple alternative to "path.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)
796
796
def gui():
    '''Are we running in a GUI?'''
    if sys.platform != 'darwin':
        return os.name == "nt" or os.environ.get("DISPLAY")
    if 'SSH_CONNECTION' in os.environ:
        # handle SSH access to a box where the user is logged in
        return False
    if getattr(osutil, 'isgui', None):
        # check if a CoreGraphics session is available
        return osutil.isgui()
    # pure build; use a safe default
    return True
811
811
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    # create the temp file next to the original so a later rename stays
    # on the same filesystem
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # original doesn't exist: the empty temp file is correct
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # never leave a stale temp file behind on failure
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
850
850
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.
    '''
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name # permanent name
        # write to a sibling temp copy; 'w' mode needs no initial content
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)

        # delegated methods
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        # publish the changes by renaming the temp copy over the original
        # NOTE(review): rename is expected to replace the target
        # atomically -- confirm in the platform module
        if not self._fp.closed:
            self._fp.close()
            rename(self._tempname, localpath(self.__name))

    def discard(self):
        # abandon pending changes: drop the temp copy, keep the original
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()
888
888
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance"""
    try:
        makedir(name, notindexed)
    except OSError, err:
        if err.errno == errno.EEXIST:
            # already there: nothing to do (mode is deliberately not
            # re-applied to an existing directory)
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without success
            raise
        # create the missing ancestors, then retry this directory
        makedirs(parent, mode, notindexed)
        makedir(name, notindexed)
    if mode is not None:
        os.chmod(name, mode)
905
905
906 def ensuredirs(name, mode=None):
906 def ensuredirs(name, mode=None):
907 """race-safe recursive directory creation"""
907 """race-safe recursive directory creation"""
908 if os.path.isdir(name):
908 if os.path.isdir(name):
909 return
909 return
910 parent = os.path.dirname(os.path.abspath(name))
910 parent = os.path.dirname(os.path.abspath(name))
911 if parent != name:
911 if parent != name:
912 ensuredirs(parent, mode)
912 ensuredirs(parent, mode)
913 try:
913 try:
914 os.mkdir(name)
914 os.mkdir(name)
915 except OSError, err:
915 except OSError, err:
916 if err.errno == errno.EEXIST and os.path.isdir(name):
916 if err.errno == errno.EEXIST and os.path.isdir(name):
917 # someone else seems to have won a directory creation race
917 # someone else seems to have won a directory creation race
918 return
918 return
919 raise
919 raise
920 if mode is not None:
920 if mode is not None:
921 os.chmod(name, mode)
921 os.chmod(name, mode)
922
922
def readfile(path):
    """Return the entire content of the file at path (binary mode)."""
    f = open(path, 'rb')
    try:
        return f.read()
    finally:
        f.close()
929
929
def writefile(path, text):
    """Replace the content of the file at path with text (binary mode)."""
    f = open(path, 'wb')
    try:
        f.write(text)
    finally:
        f.close()
936
936
def appendfile(path, text):
    """Append text to the file at path, creating it if missing."""
    f = open(path, 'ab')
    try:
        f.write(text)
    finally:
        f.close()
943
943
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # re-chunk oversized (>1M) pieces into 256k slices so read()
            # never has to hold one huge string
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2**18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = deque()

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        remaining = l
        parts = []
        queue = self._queue
        while remaining > 0:
            if not queue:
                # buffer about 256k ahead from the source iterator
                budget = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    budget -= len(chunk)
                    if budget <= 0:
                        break
                if not queue:
                    # source exhausted: short read
                    break

            chunk = queue.popleft()
            remaining -= len(chunk)
            if remaining < 0:
                # overshot: push the surplus tail back for next time
                queue.appendleft(chunk[remaining:])
                parts.append(chunk[:remaining])
            else:
                parts.append(chunk)

        return ''.join(parts)
991
991
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size
        if limit is not None:
            nbytes = min(limit, size)
        if not nbytes:
            # limit exhausted (or size == 0): stop without reading
            break
        s = f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1012
1012
def makedate(timestamp=None):
    """Build a (unixtime, offset) pair for timestamp (default: now).

    The offset is the local timezone's displacement from UTC in
    seconds, computed for that particular moment (so DST is honoured).
    """
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
    utcmoment = datetime.datetime.utcfromtimestamp(timestamp)
    localmoment = datetime.datetime.fromtimestamp(timestamp)
    delta = utcmoment - localmoment
    tz = delta.days * 86400 + delta.seconds
    return timestamp, tz
1025
1025
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """Render a (unixtime, offset) pair as a localized time string.

    unixtime is seconds since the epoch; offset is the timezone's
    distance from UTC in seconds.  The %1/%2 specifiers (or %z, which
    expands to both) become the +HHMM/-HHMM zone suffix.  With no date
    argument the current time is used.
    """
    when, offset = date or makedate()
    if when < 0:
        when = 0   # time.gmtime(lt) fails on Windows for lt < -43200
        offset = 0
    if "%1" in format or "%2" in format or "%z" in format:
        # offset is seconds *behind* UTC when positive, hence the flip
        if offset > 0:
            sign = "-"
        else:
            sign = "+"
        minutes = abs(offset) // 60
        # rewrite %z first so it picks up the %1/%2 substitutions below
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    try:
        timetuple = time.gmtime(float(when) - offset)
    except ValueError:
        # time was out of range
        timetuple = time.gmtime(sys.maxint)
    return time.strftime(format, timetuple)
1048
1048
def shortdate(date=None):
    """Render a (timestamp, tzoffset) pair as an ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')
1052
1052
def strdate(string, format, defaults=[]):
    """Parse a localized time string into a (unixtime, offset) tuple.

    defaults maps format-part keys (e.g. "d", "mb") to a pair of
    (biased, today) strings used to fill in elements the input leaves
    out.  Raises ValueError when the string does not match the format.
    """
    def explicitzone(s):
        # return the offset for a trailing "+HHMM"/"-HHMM", "GMT" or
        # "UTC" token, or None when no explicit zone is present
        tail = s.split()[-1]
        if tail[0] in "+-" and len(tail) == 5 and tail[1:].isdigit():
            if tail[0] == "+":
                sign = 1
            else:
                sign = -1
            hours = int(tail[1:3])
            minutes = int(tail[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tail in ("GMT", "UTC"):
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset = explicitzone(string)
    date = string
    if offset is not None:
        # strip the zone token before strptime sees the string
        date = " ".join(string.split()[:-1])

    # append missing elements from defaults, most specific first
    usenow = False  # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"):
        if not any(("%" + p) in format for p in part):
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # a more specific element was given, so missing less
            # specific ones are taken relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # no explicit zone: derive the offset from the local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1093
1093
def parsedate(date, formats=None, bias={}):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed; pass through unchanged
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    # resolve the symbolic dates before format matching
    if date == _('now'):
        return makedate()
    if date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # the raw "unixtime offset" form needs no format guessing
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1170
1170
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # earliest timestamp a (possibly partial) date spec could mean:
        # missing month/day default to 1
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest timestamp a (possibly partial) date spec could mean:
        # missing fields default to their maxima; try successively
        # shorter month lengths until one parses
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # '-N': anything within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # inclusive range between two date specs
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # a bare date matches the whole span it could denote
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1246
1246
def shortuser(user):
    """Condense a user name or email address to its shortest form.

    Drops the mail domain, any leading "Name <" prefix, and anything
    after the first space or dot in what remains.
    """
    pos = user.find('@')
    if pos >= 0:
        user = user[:pos]
    pos = user.find('<')
    if pos >= 0:
        user = user[pos + 1:]
    for sep in (' ', '.'):
        pos = user.find(sep)
        if pos >= 0:
            user = user[:pos]
    return user
1262
1262
def emailuser(user):
    """Return the local part of an email-address-like user string."""
    pos = user.find('@')
    if pos >= 0:
        # drop the domain
        user = user[:pos]
    pos = user.find('<')
    if pos >= 0:
        # drop any "Real Name <" prefix
        user = user[pos + 1:]
    return user
1272
1272
def email(author):
    """Extract the email portion of an author string.

    Returns the text between '<' and '>'.  With no '<' the find()
    yields -1, so slicing starts at 0 and the whole string (up to any
    '>') is returned unchanged.
    """
    end = author.find('>')
    if end < 0:
        end = None
    start = author.find('<') + 1
    return author[start:end]
1279
1279
1280 def _ellipsis(text, maxlength):
1280 def _ellipsis(text, maxlength):
1281 if len(text) <= maxlength:
1281 if len(text) <= maxlength:
1282 return text, False
1282 return text, False
1283 else:
1283 else:
1284 return "%s..." % (text[:maxlength - 3]), True
1284 return "%s..." % (text[:maxlength - 3]), True
1285
1285
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # decode first so truncation never splits a multi-byte sequence
        utext, truncated = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if truncated:
            return utext.encode(encoding.encoding)
        return text
    except (UnicodeDecodeError, UnicodeEncodeError):
        # undecodable input: fall back to byte-level truncation
        return _ellipsis(text, maxlength)[0]
1297
1297
def unitcountfn(*unittable):
    """Build a formatter that renders a count with a readable unit.

    unittable rows are (multiplier, divisor, format) triples tried in
    order; the first row where count >= divisor * multiplier wins, and
    the last row's format is the fallback for small counts.
    """
    def render(count):
        for multiplier, divisor, fmt in unittable:
            if count >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # nothing matched: use the least specific (last) entry
        return unittable[-1][2] % count

    return render
1308
1308
# Render a byte count with a human-readable unit suffix, e.g.
# bytecount(2097152) == '2.00 MB'.  Rows run from coarsest (GB) to
# finest (bytes); precision widens as magnitude shrinks, keeping
# roughly three significant digits in the output.
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
1321
1321
def uirepr(s):
    """repr() for ui output: undo backslash doubling in Windows paths."""
    text = repr(s)
    return text.replace('\\\\', '\\')
1325
1325
# delay import of textwrap
def MBTextWrapper(**kwargs):
    # Factory for a width-aware TextWrapper.  On first call it rebinds
    # the module-level name MBTextWrapper to the class itself (see the
    # 'global' statement at the bottom), so later calls construct
    # instances directly without redefining the class.
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

            # for compatibility between 2.4 and 2.6
            if getattr(self, 'drop_whitespace', None) is None:
                self.drop_whitespace = kwargs.get('drop_whitespace', True)

        def _cutdown(self, ucstr, space_left):
            # split ucstr into (head, tail) where head occupies at most
            # space_left terminal columns per encoding.ucolwidth
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                # can't break the word: put it on a line by itself
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
1436
1436
def wrap(line, width, initindent='', hangindent=''):
    """Word-wrap line to width terminal columns, East Asian width aware.

    initindent prefixes the first output line, hangindent all later
    lines.  Input and output are byte strings in the local encoding.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    codec = encoding.encoding
    errmode = encoding.encodingmode
    line = line.decode(codec, errmode)
    initindent = initindent.decode(codec, errmode)
    hangindent = hangindent.decode(codec, errmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(codec)
1449
1449
def iterlines(iterator):
    """Yield each text line from an iterable of string chunks."""
    for block in iterator:
        for line in block.splitlines():
            yield line
1454
1454
def expandpath(path):
    """Expand environment variables and ~ constructs in path."""
    expanded = os.path.expandvars(path)
    return os.path.expanduser(expanded)
1457
1457
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    # frozen build: the executable itself is hg
    return [sys.executable]
1468
1468
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # os.wait() returns a (pid, status) pair; keep only the pid so
        # the 'pid in terminated' membership test below can actually
        # match (storing the whole tuple made it always False and left
        # early-exit detection to testpid() alone)
        terminated.add(os.wait()[0])
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)  # absent on Windows
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # re-check condfn() to close the race where the condition
            # became true just as the child was seen to die
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # always restore the previous SIGCHLD disposition
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
1503
1503
# any() and all() became builtins in Python 2.5; install equivalent
# pure-Python fallbacks when running on older interpreters (the
# rebinding in the try arm is a no-op on modern Pythons).
try:
    any, all = any, all
except NameError:
    def any(iterable):
        # True if at least one element is truthy
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        # True if every element is truthy (vacuously True when empty)
        for i in iterable:
            if not i:
                return False
        return True
1518
1518
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        # Work on a copy: previously the escape entry for the doubled
        # prefix was added to the caller's mapping, leaking a surprise
        # key back to the caller.
        mapping = dict(mapping)
        mapping[prefix_char] = prefix_char
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1543
1543
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not a plain number: treat it as a symbolic service name
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
1560
1560
# All spellings accepted as boolean configuration values.
_booleans = dict.fromkeys(('1', 'yes', 'true', 'on', 'always'), True)
_booleans.update(dict.fromkeys(('0', 'no', 'false', 'off', 'never'), False))

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower())
1571
1571
1572 _hexdig = '0123456789ABCDEFabcdef'
1572 _hexdig = '0123456789ABCDEFabcdef'
1573 _hextochr = dict((a + b, chr(int(a + b, 16)))
1573 _hextochr = dict((a + b, chr(int(a + b, 16)))
1574 for a in _hexdig for b in _hexdig)
1574 for a in _hexdig for b in _hexdig)
1575
1575
1576 def _urlunquote(s):
1576 def _urlunquote(s):
1577 """Decode HTTP/HTML % encoding.
1577 """Decode HTTP/HTML % encoding.
1578
1578
1579 >>> _urlunquote('abc%20def')
1579 >>> _urlunquote('abc%20def')
1580 'abc def'
1580 'abc def'
1581 """
1581 """
1582 res = s.split('%')
1582 res = s.split('%')
1583 # fastpath
1583 # fastpath
1584 if len(res) == 1:
1584 if len(res) == 1:
1585 return s
1585 return s
1586 s = res[0]
1586 s = res[0]
1587 for item in res[1:]:
1587 for item in res[1:]:
1588 try:
1588 try:
1589 s += _hextochr[item[:2]] + item[2:]
1589 s += _hextochr[item[:2]] + item[2:]
1590 except KeyError:
1590 except KeyError:
1591 s += '%' + item
1591 s += '%' + item
1592 except UnicodeDecodeError:
1592 except UnicodeDecodeError:
1593 s += unichr(int(item[:2], 16)) + item[2:]
1593 s += unichr(int(item[:2], 16)) + item[2:]
1594 return s
1594 return s
1595
1595
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url('http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url('ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url('file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url('file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url('bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url('bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(r'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(r'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(r'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(r'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url('ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url('ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url('http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>
    """

    # characters left unescaped when quoting user/passwd in __str__
    _safechars = "!~*'()+"
    # characters left unescaped when quoting path/fragment in __str__
    _safepchars = "/!~*'()+:\\"
    _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        """Parse path into components; see the class docstring."""
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)
            if not path:
                path = None

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith(r'\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

            if parsequery and '?' in path:
                path, self.query = path.split('?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith('//'):
                parts = path[2:].split('/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                    # path of file:///d is /d
                    # path of file:///d:/ is d:/, not /d:/
                    if path and not hasdriveletter(path):
                        path = '/' + path

            if self.host and '@' in self.host:
                self.user, self.host = self.host.rsplit('@', 1)
                if ':' in self.user:
                    self.user, self.passwd = self.user.split(':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (self.host and ':' in self.host and
                not (self.host.startswith('[') and self.host.endswith(']'))):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(':', 1)
                if not self.host:
                    self.host = None

            if (self.host and self.scheme == 'file' and
                self.host not in ('localhost', '127.0.0.1', '[::1]')):
                raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, _urlunquote(v))

    def __repr__(self):
        """Show only the components that are actually set."""
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        >>> print url(r'file:///D:\data\hg')
        file:///D:\data\hg
        """
        if self._localpath:
            # local paths (and bundles) are emitted nearly verbatim
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urllib.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urllib.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # bracketed IPv6 literals must not be percent-quoted
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urllib.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urllib.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urllib.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
        return s

    def authinfo(self):
        """Return (url-sans-credentials, auth-tuple-or-None).

        The URL string is rendered with user/passwd temporarily
        stripped, then the saved credentials are restored on self.
        """
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = str(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        """Report whether this URL cannot be joined onto a base path."""
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(r'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        """Return the local filesystem path this URL refers to."""
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
1882
def hasscheme(path):
    '''Report whether path carries an explicit <scheme>: prefix.'''
    parsed = url(path)
    return bool(parsed.scheme)
1880
1885
def hasdriveletter(path):
    '''Report whether path begins with a Windows drive letter (e.g. "c:").'''
    return path and path[0:1].isalpha() and path[1:2] == ':'
1883
1888
def urllocalpath(path):
    '''Return the local filesystem path for a (possibly file:/bundle:) URL.'''
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
1886
1891
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return str(parsed)
1893
1898
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = parsed.passwd = None
    return str(parsed)
1899
1904
def isatty(fd):
    '''Best-effort check whether fd is attached to a terminal.

    Objects without an isatty() method are treated as non-interactive.
    '''
    try:
        result = fd.isatty()
    except AttributeError:
        return False
    return result
1905
1910
# Pre-built formatter turning a duration in seconds into a human-readable
# string, from whole seconds down to nanoseconds with more digits at
# smaller magnitudes; the selection logic lives in unitcountfn (a
# module-level helper defined outside this excerpt).
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
1921
1926
# current indentation depth for nested @timed reports
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        began = time.time()
        step = 2
        # deepen the indent so nested timed calls appear under their caller
        _timenesting[0] += step
        try:
            return func(*args, **kwargs)
        finally:
            spent = time.time() - began
            _timenesting[0] -= step
            sys.stderr.write('%s%s: %s\n' %
                             (' ' * _timenesting[0], func.__name__,
                              timecount(spent)))
    return wrapper
1948
1953
# Recognized size suffixes; checked in order, so the bare 'b' suffix must
# come after 'kb'/'mb'/'gb'.
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                return int(float(spec[:-len(suffix)]) * multiplier)
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
1970
1975
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behaviour. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # (source, hook) pairs; kept unsorted, ordered lazily at call time
        self._hooks = []

    def add(self, source, hook):
        '''Register hook under the given source name.'''
        self._hooks.append((source, hook))

    def __call__(self, *args):
        '''Invoke every registered hook with args, ordered by source name.'''
        self._hooks.sort(key=lambda pair: pair[0])
        for _source, hookfn in self._hooks:
            hookfn(*args)
1986
1991
def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr):
    '''Write msg to f (stderr by default) followed by a nicely formatted
    stack trace, skipping the 'skip' innermost entries.

    It deliberately does not require a ui object, so it can be used
    anywhere while developing; not meant for production code.
    '''
    f.write('%s at:\n' % msg)
    # drop our own frame plus the 'skip' innermost callers
    frames = traceback.extract_stack()[:-skip - 1]
    entries = [('%s:%s' % (fname, lineno), funcname)
               for fname, lineno, funcname, _text in frames]
    if entries:
        # align the "in <func>" column on the widest location string
        width = max(len(location) for location, _funcname in entries)
        for location, funcname in entries:
            f.write(' %-*s in %s\n' % (width, location, funcname))

# convenient shortcut
dst = debugstacktrace
General Comments 0
You need to be logged in to leave comments. Login now