cleanup: replace uses of util.(md5|sha1|sha256|sha512) with hashlib.\1...
Augie Fackler
r29341:0d83ad96 default
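The patch is mechanical: Mercurial's util.md5/sha1/sha256/sha512 were effectively thin wrappers around the stdlib constructors, so every call site now uses hashlib directly. A minimal sketch of the before/after pattern (illustrative only; `data` is a placeholder and the old form is shown as a comment rather than an import):

import hashlib

data = b'some bytes to digest'

# before: digest = util.sha1(data).hexdigest()
# after: call the stdlib constructor directly
digest = hashlib.sha1(data).hexdigest()
print(digest)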
@@ -43,6 +43,7 @@ from __future__ import absolute_import
 import SocketServer
 import errno
 import gc
+import hashlib
 import inspect
 import os
 import random
@@ -76,7 +77,7 @@ testedwith = 'internal'

 def _hashlist(items):
     """return sha1 hexdigest for a list"""
-    return util.sha1(str(items)).hexdigest()
+    return hashlib.sha1(str(items)).hexdigest()

 # sensitive config sections affecting confighash
 _configsections = [
@@ -91,6 +91,7 @@ will disable itself if any of those are

 from __future__ import absolute_import

+import hashlib
 import os
 import stat
 import sys
@@ -141,7 +142,7 @@ def _hashignore(ignore):
     copy.

     """
-    sha1 = util.sha1()
+    sha1 = hashlib.sha1()
     if util.safehasattr(ignore, 'includepat'):
         sha1.update(ignore.includepat)
         sha1.update('\0\0')
@@ -10,6 +10,7 @@
 from __future__ import absolute_import

 import errno
+import hashlib
 import os
 import shutil

@@ -229,7 +230,7 @@ def _lfconvert_addchangeset(rsrc, rdst,
                        raise error.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
-                m = util.sha1('')
+                m = hashlib.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
@@ -10,6 +10,7 @@
 from __future__ import absolute_import

 import copy
+import hashlib
 import os
 import platform
 import stat
@@ -359,7 +360,7 @@ def writestandin(repo, standin, hash, ex
 def copyandhash(instream, outfile):
     '''Read bytes from instream (iterable) and write them to outfile,
     computing the SHA-1 hash of the data along the way. Return the hash.'''
-    hasher = util.sha1('')
+    hasher = hashlib.sha1('')
     for data in instream:
         hasher.update(data)
         outfile.write(data)
@@ -371,7 +372,7 @@ def hashrepofile(repo, file):
 def hashfile(file):
     if not os.path.exists(file):
         return ''
-    hasher = util.sha1('')
+    hasher = hashlib.sha1('')
     fd = open(file, 'rb')
     for data in util.filechunkiter(fd, 128 * 1024):
         hasher.update(data)
@@ -400,7 +401,7 @@ def urljoin(first, second, *arg):
 def hexsha1(data):
     """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
     object data"""
-    h = util.sha1()
+    h = hashlib.sha1()
     for chunk in util.filechunkiter(data):
         h.update(chunk)
     return h.hexdigest()
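The lfutil helpers above share one chunked-hashing pattern: feed fixed-size chunks to a hashlib object rather than reading the whole file into memory. A stand-alone sketch of that pattern, assuming plain Python 3 with no Mercurial imports (`chunked_sha1` is a hypothetical name; the 128 KiB chunk size mirrors hashfile above):

import hashlib
import os

def chunked_sha1(path, chunksize=128 * 1024):
    # Mirror hashfile(): missing files hash to '', otherwise digest in chunks.
    if not os.path.exists(path):
        return ''
    hasher = hashlib.sha1()
    with open(path, 'rb') as fd:
        for chunk in iter(lambda: fd.read(chunksize), b''):
            hasher.update(chunk)
    return hasher.hexdigest()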
@@ -8,6 +8,7 @@
 from __future__ import absolute_import

 import errno
+import hashlib

 from .i18n import _
 from .node import (
@@ -1646,7 +1647,7 @@ def check_heads(repo, their_heads, conte
     Used by peer for unbundling.
     """
     heads = repo.heads()
-    heads_hash = util.sha1(''.join(sorted(heads))).digest()
+    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
     if not (their_heads == ['force'] or their_heads == heads or
             their_heads == ['hashed', heads_hash]):
         # someone else committed/pushed/unbundled while we
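check_heads here, and the wireproto client near the end of this patch, both reduce a set of head nodes to one stable digest: sort the binary node ids, concatenate, and SHA-1 the result, so the comparison is order-independent. A rough stand-alone illustration (the 20-byte node values are made up):

import hashlib

def heads_hash(heads):
    # Order-independent digest of a set of head ids: sort, join, SHA-1.
    return hashlib.sha1(b''.join(sorted(heads))).digest()

local = [b'\x01' * 20, b'\x02' * 20]
remote = [b'\x02' * 20, b'\x01' * 20]
assert heads_hash(local) == heads_hash(remote)  # same heads, any order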
@@ -9,6 +9,7 @@
 from __future__ import absolute_import

 import errno
+import hashlib
 import os
 import shutil

@@ -480,7 +481,8 @@ def clone(ui, peeropts, source, dest=Non
             ui.status(_('(not using pooled storage: '
                         'unable to resolve identity of remote)\n'))
         elif sharenamemode == 'remote':
-            sharepath = os.path.join(sharepool, util.sha1(source).hexdigest())
+            sharepath = os.path.join(
+                sharepool, hashlib.sha1(source).hexdigest())
         else:
             raise error.Abort('unknown share naming mode: %s' % sharenamemode)

@@ -110,6 +110,7 @@ EXTRA ATTRIBUTES AND METHODS
 from __future__ import absolute_import, print_function

 import errno
+import hashlib
 import httplib
 import socket
 import sys
@@ -624,8 +625,7 @@ def error_handler(url):
     keepalive_handler.close_all()

 def continuity(url):
-    from . import util
-    md5 = util.md5
+    md5 = hashlib.md5
     format = '%25s: %s'

     # first fetch the file with the normal http handler
@@ -8,6 +8,7 @@
 from __future__ import absolute_import

 import errno
+import hashlib
 import inspect
 import os
 import random
@@ -1013,7 +1014,7 @@ class localrepository(object):
                               hint=_("run 'hg recover' to clean up transaction"))

         idbase = "%.40f#%f" % (random.random(), time.time())
-        txnid = 'TXN:' + util.sha1(idbase).hexdigest()
+        txnid = 'TXN:' + hashlib.sha1(idbase).hexdigest()
         self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)

         self._writejournal(desc)
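The transaction id is only a readable label derived from entropy: a random float plus the current time, hashed so the id is short and safe to pass around in hook environments. A stand-alone sketch under that reading (the 'TXN:' prefix comes from the hunk above; the .encode() is needed on Python 3, whereas the original is Python 2 code where str is bytes):

import hashlib
import random
import time

idbase = "%.40f#%f" % (random.random(), time.time())
txnid = 'TXN:' + hashlib.sha1(idbase.encode('ascii')).hexdigest()
print(txnid)  # 'TXN:' followed by 40 hex digits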
@@ -8,6 +8,7 @@
 from __future__ import absolute_import

 import errno
+import hashlib
 import os
 import shutil
 import struct
@@ -408,7 +409,7 @@ class mergestate(object):
         if fcl.isabsent():
             hash = nullhex
         else:
-            hash = util.sha1(fcl.path()).hexdigest()
+            hash = hashlib.sha1(fcl.path()).hexdigest()
             self._repo.vfs.write('merge/' + hash, fcl.data())
         self._state[fd] = ['u', hash, fcl.path(),
                            fca.path(), hex(fca.filenode()),
@@ -12,6 +12,7 @@ import collections
 import copy
 import email
 import errno
+import hashlib
 import os
 import posixpath
 import re
@@ -2412,7 +2413,7 @@ def trydiff(repo, revs, ctx1, ctx2, modi
         if not text:
             text = ""
         l = len(text)
-        s = util.sha1('blob %d\0' % l)
+        s = hashlib.sha1('blob %d\0' % l)
         s.update(text)
         return s.hexdigest()

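The digest computed in trydiff is a Git blob object id: SHA-1 over the header 'blob <size>\0' followed by the contents, which is what the index lines of git-style diffs carry. A quick stand-alone check of that construction (pure stdlib; the empty-blob id is a well-known Git constant):

import hashlib

def git_blob_id(text):
    # SHA-1 of "blob <size>\0" + contents, the id `git hash-object` would print.
    s = hashlib.sha1(b'blob %d\0' % len(text))
    s.update(text)
    return s.hexdigest()

assert git_blob_id(b'') == 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'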
@@ -9,6 +9,7 @@
 from __future__ import absolute_import

 import errno
+import hashlib

 from .i18n import _
 from .node import short
@@ -35,7 +36,7 @@ def _bundle(repo, bases, heads, node, su
     # Include a hash of all the nodes in the filename for uniqueness
     allcommits = repo.set('%ln::%ln', bases, heads)
     allhashes = sorted(c.hex() for c in allcommits)
-    totalhash = util.sha1(''.join(allhashes)).hexdigest()
+    totalhash = hashlib.sha1(''.join(allhashes)).hexdigest()
     name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)

     comp = None
@@ -9,6 +9,7 @@
 from __future__ import absolute_import

 import copy
+import hashlib
 import heapq
 import struct

@@ -18,7 +19,6 @@ from . import (
     obsolete,
     phases,
     tags as tagsmod,
-    util,
 )

 def hideablerevs(repo):
@@ -102,7 +102,7 @@ def cachehash(repo, hideable):
     it to the cache. Upon reading we can easily validate by checking the hash
     against the stored one and discard the cache in case the hashes don't match.
     """
-    h = util.sha1()
+    h = hashlib.sha1()
     h.update(''.join(repo.heads()))
     h.update(str(hash(frozenset(hideable))))
     return h.digest()
@@ -10,6 +10,7 @@ from __future__ import absolute_import
 import contextlib
 import errno
 import glob
+import hashlib
 import os
 import re
 import shutil
@@ -224,7 +225,7 @@ def filteredhash(repo, maxrev):
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
     if revs:
-        s = util.sha1()
+        s = hashlib.sha1()
         for rev in revs:
             s.update('%s;' % rev)
         key = s.digest()
@@ -7,6 +7,8 @@

 from __future__ import absolute_import

+import hashlib
+
 from .i18n import _
 from . import (
     bdiff,
@@ -27,14 +29,14 @@ def _findexactmatches(repo, added, remov
     for i, fctx in enumerate(removed):
         repo.ui.progress(_('searching for exact renames'), i, total=numfiles,
                          unit=_('files'))
-        h = util.sha1(fctx.data()).digest()
+        h = hashlib.sha1(fctx.data()).digest()
         hashes[h] = fctx

     # For each added file, see if it corresponds to a removed file.
     for i, fctx in enumerate(added):
         repo.ui.progress(_('searching for exact renames'), i + len(removed),
                          total=numfiles, unit=_('files'))
-        h = util.sha1(fctx.data()).digest()
+        h = hashlib.sha1(fctx.data()).digest()
         if h in hashes:
             yield (hashes[h], fctx)

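Exact rename detection is a content-hash join: digest every removed file into a dict, then look up each added file's digest. A simplified stand-alone sketch of the same idea, using plain byte strings in place of Mercurial file contexts (names and sample data are hypothetical):

import hashlib

def find_exact_renames(removed, added):
    # removed/added: dicts mapping file name -> contents (bytes)
    hashes = {hashlib.sha1(data).digest(): name for name, data in removed.items()}
    for name, data in added.items():
        h = hashlib.sha1(data).digest()
        if h in hashes:
            yield hashes[h], name  # (old name, new name)

old = {'a.txt': b'same bytes'}
new = {'b.txt': b'same bytes', 'c.txt': b'other'}
print(list(find_exact_renames(old, new)))  # [('a.txt', 'b.txt')]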
@@ -9,6 +9,7 @@

 from __future__ import absolute_import

+import hashlib
 import os
 import ssl
 import sys
@@ -388,9 +389,9 @@ def validatesocket(sock):
     # If a certificate fingerprint is pinned, use it and only it to
     # validate the remote cert.
     peerfingerprints = {
-        'sha1': util.sha1(peercert).hexdigest(),
-        'sha256': util.sha256(peercert).hexdigest(),
-        'sha512': util.sha512(peercert).hexdigest(),
+        'sha1': hashlib.sha1(peercert).hexdigest(),
+        'sha256': hashlib.sha256(peercert).hexdigest(),
+        'sha512': hashlib.sha512(peercert).hexdigest(),
     }

     def fmtfingerprint(s):
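Certificate pinning only needs the DER bytes of the peer certificate; each pinned value is then a hex digest under whichever algorithm the user configured. A hedged illustration of computing the same three fingerprints (the colon-separated display is an assumption about fmtfingerprint, whose body is not shown in this hunk):

import hashlib

def fingerprints(peercert):
    # peercert: DER bytes, e.g. from ssl.SSLSocket.getpeercert(binary_form=True)
    return {
        'sha1': hashlib.sha1(peercert).hexdigest(),
        'sha256': hashlib.sha256(peercert).hexdigest(),
        'sha512': hashlib.sha512(peercert).hexdigest(),
    }

def fmtfingerprint_guess(hexdigest):
    # Assumed display form: pairs of hex digits joined by colons.
    return ':'.join(hexdigest[i:i + 2] for i in range(0, len(hexdigest), 2))

print(fmtfingerprint_guess(fingerprints(b'0\x82\x01\x0a')['sha256']))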
@@ -9,6 +9,7 @@ from __future__ import absolute_import

 import copy
 import errno
+import hashlib
 import os
 import posixpath
 import re
@@ -50,7 +51,7 @@ def _expandedabspath(path):

 def _getstorehashcachename(remotepath):
     '''get a unique filename for the store hash cache of a remote repository'''
-    return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
+    return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]

 class SubrepoAbort(error.Abort):
     """Exception class used to avoid handling a subrepo error more than once"""
@@ -659,7 +660,7 @@ class hgsubrepo(abstractsubrepo):
         yield '# %s\n' % _expandedabspath(remotepath)
         vfs = self._repo.vfs
         for relname in filelist:
-            filehash = util.sha1(vfs.tryread(relname)).hexdigest()
+            filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
             yield '%s = %s\n' % (relname, filehash)

     @propertycache
@@ -7,6 +7,7 @@

 from __future__ import absolute_import

+import hashlib
 import itertools
 import os
 import sys
@@ -410,7 +411,7 @@ class wirepeer(peer.peerrepository):

         if heads != ['force'] and self.capable('unbundlehash'):
             heads = encodelist(['hashed',
-                                util.sha1(''.join(sorted(heads))).digest()])
+                                hashlib.sha1(''.join(sorted(heads))).digest()])
         else:
             heads = encodelist(heads)
