cleanup: replace uses of util.(md5|sha1|sha256|sha512) with hashlib.\1...

Author: Augie Fackler
Changeset: r29341:0d83ad96 (branch: default)

Mercurial's util module exposed md5, sha1, sha256 and sha512 as thin aliases
for the corresponding hashlib constructors, so switching call sites is
behavior-preserving. Each module below gains an `import hashlib` and its
util.* hash calls change over; repoview.py can additionally drop its
now-unused util import.
```diff
@@ -43,6 +43,7 @@ from __future__ import absolute_import
 import SocketServer
 import errno
 import gc
+import hashlib
 import inspect
 import os
 import random
```
```diff
@@ -76,7 +77,7 @@ testedwith = 'internal'
 
 def _hashlist(items):
     """return sha1 hexdigest for a list"""
-    return util.sha1(str(items)).hexdigest()
+    return hashlib.sha1(str(items)).hexdigest()
 
 # sensitive config sections affecting confighash
 _configsections = [
```
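The swap is a pure drop-in: the digest is unchanged, only the import path moves. A minimal standalone sketch of the pattern `_hashlist` relies on (the sample list is invented, and the `.encode()` is needed on Python 3, where hashlib wants bytes; Mercurial's Python 2 code passes `str` directly):

```python
import hashlib

def _hashlist(items):
    """Return the SHA-1 hexdigest of a list's string form."""
    return hashlib.sha1(str(items).encode('utf-8')).hexdigest()

# Any change to the list, including order, changes the hash.
print(_hashlist(['ui.editor', 'ui.pager']))  # 40 hex chars
print(_hashlist(['ui.pager', 'ui.editor']))  # a different digest
```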
```diff
@@ -91,6 +91,7 @@ will disable itself if any of those are
 
 from __future__ import absolute_import
 
+import hashlib
 import os
 import stat
 import sys
```
```diff
@@ -141,7 +142,7 @@ def _hashignore(ignore):
     copy.
 
     """
-    sha1 = util.sha1()
+    sha1 = hashlib.sha1()
     if util.safehasattr(ignore, 'includepat'):
         sha1.update(ignore.includepat)
     sha1.update('\0\0')
```
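Note the incremental API in play here: the hasher starts empty, then each pattern field is fed through `update()` with a `'\0\0'` separator so adjacent variable-length fields cannot collide by simple concatenation. A hedged sketch of that framing trick (the helper name is invented):

```python
import hashlib

def hash_fields(*fields):
    """Hash several variable-length byte fields unambiguously by
    inserting a delimiter that cannot appear inside the fields."""
    h = hashlib.sha1()
    for field in fields:
        h.update(field)
        h.update(b'\0\0')  # same delimiter fsmonitor puts between patterns
    return h.hexdigest()

# ('ab','c') and ('a','bc') hash differently, unlike plain concatenation.
assert hash_fields(b'ab', b'c') != hash_fields(b'a', b'bc')
```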
```diff
@@ -10,6 +10,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import shutil
```
```diff
@@ -229,7 +230,7 @@ def _lfconvert_addchangeset(rsrc, rdst,
                 raise error.Abort(_('largefile %s becomes symlink') % f)
 
             # largefile was modified, update standins
-            m = util.sha1('')
+            m = hashlib.sha1('')
             m.update(ctx[f].data())
             hash = m.hexdigest()
             if f not in lfiletohash or lfiletohash[f] != hash:
```
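One subtlety worth flagging for reviewers: `hashlib.sha1('')` seeds the hasher with empty data, which is exactly equivalent to `hashlib.sha1()` with no argument, so the follow-up `update()` calls yield the same digest either way. A quick standalone check:

```python
import hashlib

data = b'largefile contents'

m1 = hashlib.sha1(b'')   # the style used in the hunk above
m1.update(data)

m2 = hashlib.sha1()      # equivalent: no initial data
m2.update(data)

m3 = hashlib.sha1(data)  # equivalent: all data up front

assert m1.hexdigest() == m2.hexdigest() == m3.hexdigest()
```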
```diff
@@ -10,6 +10,7 @@
 from __future__ import absolute_import
 
 import copy
+import hashlib
 import os
 import platform
 import stat
```
```diff
@@ -359,7 +360,7 @@ def writestandin(repo, standin, hash, ex
 def copyandhash(instream, outfile):
     '''Read bytes from instream (iterable) and write them to outfile,
     computing the SHA-1 hash of the data along the way. Return the hash.'''
-    hasher = util.sha1('')
+    hasher = hashlib.sha1('')
     for data in instream:
         hasher.update(data)
         outfile.write(data)
```
```diff
@@ -371,7 +372,7 @@ def hashrepofile(repo, file):
 def hashfile(file):
     if not os.path.exists(file):
         return ''
-    hasher = util.sha1('')
+    hasher = hashlib.sha1('')
     fd = open(file, 'rb')
     for data in util.filechunkiter(fd, 128 * 1024):
         hasher.update(data)
```
```diff
@@ -400,7 +401,7 @@ def urljoin(first, second, *arg):
 def hexsha1(data):
     """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
     object data"""
-    h = util.sha1()
+    h = hashlib.sha1()
     for chunk in util.filechunkiter(data):
         h.update(chunk)
     return h.hexdigest()
```
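All three largefiles helpers above follow the same streaming pattern: read fixed-size chunks and feed each to `update()`, so arbitrarily large files are hashed in constant memory. A self-contained equivalent that does not depend on Mercurial's `util.filechunkiter` (the path in the commented call is hypothetical):

```python
import hashlib

def hashfile(path, chunksize=128 * 1024):
    """Return the SHA-1 hexdigest of a file, read in 128 KiB chunks."""
    hasher = hashlib.sha1()
    with open(path, 'rb') as fd:
        while True:
            chunk = fd.read(chunksize)
            if not chunk:
                break
            hasher.update(chunk)
    return hasher.hexdigest()

# print(hashfile('/tmp/example.bin'))  # hypothetical path
```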
```diff
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 
 from .i18n import _
 from .node import (
```
```diff
@@ -1646,7 +1647,7 @@ def check_heads(repo, their_heads, conte
     Used by peer for unbundling.
     """
     heads = repo.heads()
-    heads_hash = util.sha1(''.join(sorted(heads))).digest()
+    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
     if not (their_heads == ['force'] or their_heads == heads or
             their_heads == ['hashed', heads_hash]):
         # someone else committed/pushed/unbundled while we
```
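The unbundle race check works by hashing the sorted set of head nodes: the client digests the heads it saw, the server recomputes the digest over its current heads, and a mismatch means someone pushed in between. A sketch of the idea with made-up 20-byte node values:

```python
import hashlib
import os

def heads_hash(heads):
    """Digest of the sorted binary head nodes, as both sides compute it."""
    return hashlib.sha1(b''.join(sorted(heads))).digest()

heads = [os.urandom(20) for _ in range(3)]       # fake changeset nodes
client_view = heads_hash(heads)
server_view = heads_hash(list(reversed(heads)))  # order is irrelevant
assert client_view == server_view                # sorting makes it canonical

heads.append(os.urandom(20))                     # someone pushed a new head
assert heads_hash(heads) != client_view          # the race is detected
```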
```diff
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import shutil
```
```diff
@@ -480,7 +481,8 @@ def clone(ui, peeropts, source, dest=Non
         ui.status(_('(not using pooled storage: '
                     'unable to resolve identity of remote)\n'))
     elif sharenamemode == 'remote':
-        sharepath = os.path.join(sharepool, util.sha1(source).hexdigest())
+        sharepath = os.path.join(
+            sharepool, hashlib.sha1(source).hexdigest())
     else:
         raise error.Abort('unknown share naming mode: %s' % sharenamemode)
 
```
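In 'remote' share-naming mode the pool directory is keyed by a SHA-1 of the source URL, which yields a stable, filesystem-safe name per remote: every clone of the same URL lands in the same pooled store. A hedged sketch (pool path and URL are invented):

```python
import hashlib
import os

def pooled_share_path(sharepool, source):
    """Derive a stable per-remote directory name inside the share pool."""
    return os.path.join(sharepool, hashlib.sha1(source).hexdigest())

print(pooled_share_path('/srv/hgpool', b'https://example.com/repo'))
# -> /srv/hgpool/<40 hex chars>, identical for every clone of that URL
```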
```diff
@@ -110,6 +110,7 @@ EXTRA ATTRIBUTES AND METHODS
 from __future__ import absolute_import, print_function
 
 import errno
+import hashlib
 import httplib
 import socket
 import sys
```
```diff
@@ -624,8 +625,7 @@ def error_handler(url):
     keepalive_handler.close_all()
 
 def continuity(url):
-    from . import util
-    md5 = util.md5
+    md5 = hashlib.md5
     format = '%25s: %s'
 
     # first fetch the file with the normal http handler
```
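`continuity()` is keepalive's self-test: it fetches the same URL with and without the keepalive handler and compares MD5 digests to prove the payloads are byte-identical; this hunk only drops the local `util` alias. The comparison idea in isolation (function name invented):

```python
import hashlib

def same_payload(data_a, data_b):
    """Compare two downloads by digest, as the continuity test does."""
    return hashlib.md5(data_a).hexdigest() == hashlib.md5(data_b).hexdigest()

assert same_payload(b'response bytes', b'response bytes')
assert not same_payload(b'response bytes', b'response bytes, truncated')
```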
```diff
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import inspect
 import os
 import random
```
```diff
@@ -1013,7 +1014,7 @@ class localrepository(object):
                 hint=_("run 'hg recover' to clean up transaction"))
 
         idbase = "%.40f#%f" % (random.random(), time.time())
-        txnid = 'TXN:' + util.sha1(idbase).hexdigest()
+        txnid = 'TXN:' + hashlib.sha1(idbase).hexdigest()
         self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
 
         self._writejournal(desc)
```
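The transaction id only needs uniqueness, not cryptographic meaning: localrepo hashes a high-precision random/time seed and prefixes it with `TXN:` so hooks get a short opaque token. The same recipe as a standalone sketch (with an `encode()` added for Python 3):

```python
import hashlib
import random
import time

def maketxnid():
    """Build a unique transaction id the way localrepository does."""
    idbase = "%.40f#%f" % (random.random(), time.time())
    return 'TXN:' + hashlib.sha1(idbase.encode('ascii')).hexdigest()

print(maketxnid())  # 'TXN:' followed by 40 hex digits, new every call
```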
```diff
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import shutil
 import struct
```
```diff
@@ -408,7 +409,7 @@ class mergestate(object):
         if fcl.isabsent():
             hash = nullhex
         else:
-            hash = util.sha1(fcl.path()).hexdigest()
+            hash = hashlib.sha1(fcl.path()).hexdigest()
             self._repo.vfs.write('merge/' + hash, fcl.data())
         self._state[fd] = ['u', hash, fcl.path(),
                            fca.path(), hex(fca.filenode()),
```
```diff
@@ -12,6 +12,7 @@ import collections
 import copy
 import email
 import errno
+import hashlib
 import os
 import posixpath
 import re
```
```diff
@@ -2412,7 +2413,7 @@ def trydiff(repo, revs, ctx1, ctx2, modi
         if not text:
             text = ""
         l = len(text)
-        s = util.sha1('blob %d\0' % l)
+        s = hashlib.sha1('blob %d\0' % l)
         s.update(text)
         return s.hexdigest()
 
```
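This helper computes Git's blob object id so git-style diffs can emit `index` lines: Git hashes the header `blob <len>\0` followed by the content. That is verifiable outside Mercurial against `git hash-object`:

```python
import hashlib

def gitindex(text):
    """SHA-1 of a Git blob: the header 'blob <size>\\0' plus the content."""
    s = hashlib.sha1(b'blob %d\0' % len(text))
    s.update(text)
    return s.hexdigest()

# Matches `printf 'hello\n' | git hash-object --stdin`:
assert gitindex(b'hello\n') == 'ce013625030ba8dba906f756967f9e9ca394464a'
```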
```diff
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 
 from .i18n import _
 from .node import short
```
```diff
@@ -35,7 +36,7 @@ def _bundle(repo, bases, heads, node, su
     # Include a hash of all the nodes in the filename for uniqueness
     allcommits = repo.set('%ln::%ln', bases, heads)
     allhashes = sorted(c.hex() for c in allcommits)
-    totalhash = util.sha1(''.join(allhashes)).hexdigest()
+    totalhash = hashlib.sha1(''.join(allhashes)).hexdigest()
     name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
 
     comp = None
```
```diff
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import copy
+import hashlib
 import heapq
 import struct
 
```
```diff
@@ -18,7 +19,6 @@ from . import (
     obsolete,
     phases,
     tags as tagsmod,
-    util,
 )
 
 def hideablerevs(repo):
```
```diff
@@ -102,7 +102,7 @@ def cachehash(repo, hideable):
     it to the cache. Upon reading we can easily validate by checking the hash
     against the stored one and discard the cache in case the hashes don't match.
     """
-    h = util.sha1()
+    h = hashlib.sha1()
     h.update(''.join(repo.heads()))
     h.update(str(hash(frozenset(hideable))))
     return h.digest()
```
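`cachehash` builds a validation key from the repo heads plus the set of hideable revisions, so a stale hidden-revision cache is detected cheaply on read and discarded. The validate-on-read pattern in miniature (helper names and cache payload are invented):

```python
import hashlib

def cachekey(heads, hideable):
    """Digest over the inputs the cached result depends on."""
    h = hashlib.sha1()
    h.update(b''.join(heads))
    h.update(str(hash(frozenset(hideable))).encode('ascii'))
    return h.digest()

def readcache(stored_key, stored_data, heads, hideable):
    """Return cached data only if its inputs are unchanged."""
    if stored_key == cachekey(heads, hideable):
        return stored_data
    return None  # key mismatch: discard the cache and recompute
```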
```diff
@@ -10,6 +10,7 @@ from __future__ import absolute_import
 import contextlib
 import errno
 import glob
+import hashlib
 import os
 import re
 import shutil
```
```diff
@@ -224,7 +225,7 @@ def filteredhash(repo, maxrev):
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
     if revs:
-        s = util.sha1()
+        s = hashlib.sha1()
         for rev in revs:
             s.update('%s;' % rev)
         key = s.digest()
```
```diff
@@ -7,6 +7,8 @@
 
 from __future__ import absolute_import
 
+import hashlib
+
 from .i18n import _
 from . import (
     bdiff,
```
```diff
@@ -27,14 +29,14 @@ def _findexactmatches(repo, added, remov
     for i, fctx in enumerate(removed):
         repo.ui.progress(_('searching for exact renames'), i, total=numfiles,
                          unit=_('files'))
-        h = util.sha1(fctx.data()).digest()
+        h = hashlib.sha1(fctx.data()).digest()
         hashes[h] = fctx
 
     # For each added file, see if it corresponds to a removed file.
     for i, fctx in enumerate(added):
         repo.ui.progress(_('searching for exact renames'), i + len(removed),
                          total=numfiles, unit=_('files'))
-        h = util.sha1(fctx.data()).digest()
+        h = hashlib.sha1(fctx.data()).digest()
         if h in hashes:
             yield (hashes[h], fctx)
 
```
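Exact rename detection is a classic digest join: hash every removed file's content into a dict, then probe it with each added file's digest; any hit is a byte-identical rename, found in linear time instead of comparing every pair. A standalone sketch with invented file data:

```python
import hashlib

def findexactmatches(removed, added):
    """Yield (old_name, new_name) pairs with byte-identical contents."""
    hashes = {}
    for name, data in removed:
        hashes[hashlib.sha1(data).digest()] = name
    for name, data in added:
        h = hashlib.sha1(data).digest()
        if h in hashes:
            yield (hashes[h], name)

removed = [('old/readme.txt', b'hello\n')]
added = [('docs/README', b'hello\n'), ('src/new.py', b'pass\n')]
print(list(findexactmatches(removed, added)))
# [('old/readme.txt', 'docs/README')]
```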
```diff
@@ -9,6 +9,7 @@
 
 from __future__ import absolute_import
 
+import hashlib
 import os
 import ssl
 import sys
```
```diff
@@ -388,9 +389,9 @@ def validatesocket(sock):
     # If a certificate fingerprint is pinned, use it and only it to
     # validate the remote cert.
     peerfingerprints = {
-        'sha1': util.sha1(peercert).hexdigest(),
-        'sha256': util.sha256(peercert).hexdigest(),
-        'sha512': util.sha512(peercert).hexdigest(),
+        'sha1': hashlib.sha1(peercert).hexdigest(),
+        'sha256': hashlib.sha256(peercert).hexdigest(),
+        'sha512': hashlib.sha512(peercert).hexdigest(),
     }
 
     def fmtfingerprint(s):
```
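Certificate pinning compares a digest of the peer's DER-encoded certificate against a configured fingerprint, and all three digests are computed up front so a pin in any supported algorithm can match. Computing and rendering a fingerprint in the familiar colon-separated form (the certificate bytes here are a stand-in, not real DER):

```python
import hashlib

def fingerprints(peercert):
    """All three digests of the DER-encoded peer certificate."""
    return {
        'sha1': hashlib.sha1(peercert).hexdigest(),
        'sha256': hashlib.sha256(peercert).hexdigest(),
        'sha512': hashlib.sha512(peercert).hexdigest(),
    }

def fmtfingerprint(s):
    """Render 'ab12cd...' as 'ab:12:cd:...' the way tools display it."""
    return ':'.join(s[i:i + 2] for i in range(0, len(s), 2))

cert = b'\x30\x82fake-der-bytes'  # stand-in for a real certificate
print(fmtfingerprint(fingerprints(cert)['sha256']))
```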
```diff
@@ -9,6 +9,7 @@ from __future__ import absolute_import
 
 import copy
 import errno
+import hashlib
 import os
 import posixpath
 import re
```
```diff
@@ -50,7 +51,7 @@ def _expandedabspath(path):
 
 def _getstorehashcachename(remotepath):
     '''get a unique filename for the store hash cache of a remote repository'''
-    return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
+    return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
 
 class SubrepoAbort(error.Abort):
     """Exception class used to avoid handling a subrepo error more than once"""
```
```diff
@@ -659,7 +660,7 @@ class hgsubrepo(abstractsubrepo):
         yield '# %s\n' % _expandedabspath(remotepath)
         vfs = self._repo.vfs
         for relname in filelist:
-            filehash = util.sha1(vfs.tryread(relname)).hexdigest()
+            filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
             yield '%s = %s\n' % (relname, filehash)
 
     @propertycache
```
```diff
@@ -7,6 +7,7 @@
 
 from __future__ import absolute_import
 
+import hashlib
 import itertools
 import os
 import sys
```
```diff
@@ -410,7 +411,7 @@ class wirepeer(peer.peerrepository):
 
         if heads != ['force'] and self.capable('unbundlehash'):
             heads = encodelist(['hashed',
-                                util.sha1(''.join(sorted(heads))).digest()])
+                                hashlib.sha1(''.join(sorted(heads))).digest()])
         else:
             heads = encodelist(heads)
 
```