core: migrate uses of hashlib.sha1 to hashutil.sha1...
Augie Fackler
r44517:a61287a9 default
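The change routes every SHA-1 construction in core through a single wrapper, hashutil.sha1, instead of calling hashlib.sha1 directly. For orientation, here is a minimal sketch of what such a mercurial/utils/hashutil.py wrapper can look like; the sha1dc import path and the fallback logic are assumptions for illustration, not a quote of the actual module:

# Hypothetical sketch of mercurial/utils/hashutil.py: a single chokepoint for
# constructing SHA-1 hashers, so callers write hashutil.sha1(...) instead of
# hashlib.sha1(...) and the backend can be swapped in one place.
from __future__ import absolute_import

import hashlib

try:
    # If a hardened implementation (e.g. a collision-detecting SHA-1) is
    # bundled, prefer it.  This import path is an assumption for the sketch.
    from ..thirdparty import sha1dc

    sha1 = sha1dc.sha1
except (ImportError, AttributeError):
    # Fall back to the standard library implementation.
    sha1 = hashlib.sha1

With this indirection in place, the rest of the commit is a mechanical substitution of call sites, one file at a time, as the hunks below show.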
@@ -41,7 +41,6 @@ Config
 
 from __future__ import absolute_import
 
-import hashlib
 import inspect
 import os
 import re
@@ -67,6 +66,7 @@ from . import (
 )
 
 from .utils import (
+    hashutil,
     procutil,
     stringutil,
 )
@@ -74,7 +74,7 @@ from .utils import (
 
 def _hashlist(items):
     """return sha1 hexdigest for a list"""
-    return node.hex(hashlib.sha1(stringutil.pprint(items)).digest())
+    return node.hex(hashutil.sha1(stringutil.pprint(items)).digest())
 
 
 # sensitive config sections affecting confighash
@@ -8,7 +8,6 @@
 from __future__ import absolute_import
 
 import collections
-import hashlib
 
 from .i18n import _
 from .node import (
@@ -40,7 +39,10 @@ from . import (
     wireprototypes,
 )
 from .interfaces import repository
-from .utils import stringutil
+from .utils import (
+    hashutil,
+    stringutil,
+)
 
 urlerr = util.urlerr
 urlreq = util.urlreq
@@ -2705,7 +2707,7 @@ def check_heads(repo, their_heads, conte
     Used by peer for unbundling.
     """
     heads = repo.heads()
-    heads_hash = hashlib.sha1(b''.join(sorted(heads))).digest()
+    heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
     if not (
         their_heads == [b'force']
         or their_heads == heads
@@ -9,7 +9,6 @@
 from __future__ import absolute_import
 
 import errno
-import hashlib
 import os
 import shutil
 import stat
@@ -48,7 +47,7 @@ from . import (
     verify as verifymod,
     vfs as vfsmod,
 )
-
+from .utils import hashutil
 from .interfaces import repository as repositorymod
 
 release = lock.release
@@ -738,7 +737,7 @@ def clone(
             )
         elif sharenamemode == b'remote':
            sharepath = os.path.join(
-                sharepool, node.hex(hashlib.sha1(source).digest())
+                sharepool, node.hex(hashutil.sha1(source).digest())
             )
         else:
             raise error.Abort(
@@ -8,7 +8,6 @@
 from __future__ import absolute_import
 
 import errno
-import hashlib
 import os
 import random
 import sys
@@ -74,6 +73,7 @@ from .interfaces import (
 )
 
 from .utils import (
+    hashutil,
     procutil,
     stringutil,
 )
@@ -2007,7 +2007,7 @@ class localrepository(object):
             )
 
         idbase = b"%.40f#%f" % (random.random(), time.time())
-        ha = hex(hashlib.sha1(idbase).digest())
+        ha = hex(hashutil.sha1(idbase).digest())
         txnid = b'TXN:' + ha
         self.hook(b'pretxnopen', throw=True, txnname=desc, txnid=txnid)
 
@@ -8,7 +8,6 @@
 from __future__ import absolute_import
 
 import errno
-import hashlib
 import shutil
 import stat
 import struct
@@ -39,6 +38,7 @@ from . import (
     util,
     worker,
 )
+from .utils import hashutil
 
 _pack = struct.pack
 _unpack = struct.unpack
@@ -512,7 +512,7 @@ class mergestate(object):
         """hash the path of a local file context for storage in the .hg/merge
         directory."""
 
-        return hex(hashlib.sha1(path).digest())
+        return hex(hashutil.sha1(path).digest())
 
     def add(self, fcl, fco, fca, fd):
         """add a new (potentially?) conflicting file the merge state
@@ -70,7 +70,6 @@ comment associated with each format for
 from __future__ import absolute_import
 
 import errno
-import hashlib
 import struct
 
 from .i18n import _
@@ -85,7 +84,10 @@ from . import (
     pycompat,
     util,
 )
-from .utils import dateutil
+from .utils import (
+    dateutil,
+    hashutil,
+)
 
 parsers = policy.importmod('parsers')
 
@@ -1028,7 +1030,7 @@ def _computecontentdivergentset(repo):
 
 def makefoldid(relation, user):
 
-    folddigest = hashlib.sha1(user)
+    folddigest = hashutil.sha1(user)
     for p in relation[0] + relation[1]:
         folddigest.update(b'%d' % p.rev())
         folddigest.update(p.node())
@@ -12,7 +12,6 @@ import collections
 import contextlib
 import copy
 import errno
-import hashlib
 import os
 import re
 import shutil
@@ -41,6 +40,7 @@ from . import (
 )
 from .utils import (
     dateutil,
+    hashutil,
     procutil,
     stringutil,
 )
@@ -2943,7 +2943,7 @@ def trydiff(
         if not text:
             text = b""
         l = len(text)
-        s = hashlib.sha1(b'blob %d\0' % l)
+        s = hashutil.sha1(b'blob %d\0' % l)
         s.update(text)
         return hex(s.digest())
 
@@ -9,7 +9,6 @@
 from __future__ import absolute_import
 
 import errno
-import hashlib
 
 from .i18n import _
 from .node import (
@@ -29,7 +28,10 @@ from . import (
     pycompat,
     util,
 )
-from .utils import stringutil
+from .utils import (
+    hashutil,
+    stringutil,
+)
 
 
 def backupbundle(
@@ -45,7 +47,7 @@ def backupbundle(
     # Include a hash of all the nodes in the filename for uniqueness
     allcommits = repo.set(b'%ln::%ln', bases, heads)
     allhashes = sorted(c.hex() for c in allcommits)
-    totalhash = hashlib.sha1(b''.join(allhashes)).digest()
+    totalhash = hashutil.sha1(b''.join(allhashes)).digest()
     name = b"%s/%s-%s-%s.hg" % (
         backupdir,
         short(node),
@@ -33,10 +33,10 @@ the concept.
 
 from __future__ import absolute_import
 
-import hashlib
 import struct
 
 from .. import error
+from ..utils import hashutil
 
 ## sidedata type constant
 # reserve a block for testing purposes.
@@ -64,7 +64,7 @@ def sidedatawriteprocessor(rl, text, sid
     sidedata.sort()
     rawtext = [SIDEDATA_HEADER.pack(len(sidedata))]
     for key, value in sidedata:
-        digest = hashlib.sha1(value).digest()
+        digest = hashutil.sha1(value).digest()
         rawtext.append(SIDEDATA_ENTRY.pack(key, len(value), digest))
     for key, value in sidedata:
         rawtext.append(value)
@@ -85,7 +85,7 @@ def sidedatareadprocessor(rl, text):
         # read the data associated with that entry
         nextdataoffset = dataoffset + size
         entrytext = text[dataoffset:nextdataoffset]
-        readdigest = hashlib.sha1(entrytext).digest()
+        readdigest = hashutil.sha1(entrytext).digest()
         if storeddigest != readdigest:
             raise error.SidedataHashError(key, storeddigest, readdigest)
         sidedata[key] = entrytext
@@ -9,7 +9,6 @@ from __future__ import absolute_import
 
 import errno
 import glob
-import hashlib
 import os
 import posixpath
 import re
@@ -48,6 +47,7 @@ from . import (
 )
 
 from .utils import (
+    hashutil,
     procutil,
     stringutil,
 )
@@ -366,7 +366,7 @@ def filteredhash(repo, maxrev):
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
-        s = hashlib.sha1()
+        s = hashutil.sha1()
         for rev in revs:
             s.update(b'%d;' % rev)
         key = s.digest()
@@ -7,7 +7,6 @@
 
 from __future__ import absolute_import
 
-import hashlib
 import os
 
 from .i18n import _
@@ -24,6 +23,7 @@ from . import (
     scmutil,
     util,
 )
+from .utils import hashutil
 
 # Whether sparse features are enabled. This variable is intended to be
 # temporary to facilitate porting sparse to core. It should eventually be
@@ -205,12 +205,12 @@ def configsignature(repo, includetemp=Tr
     tempsignature = b'0'
 
     if signature is None or (includetemp and tempsignature is None):
-        signature = hex(hashlib.sha1(repo.vfs.tryread(b'sparse')).digest())
+        signature = hex(hashutil.sha1(repo.vfs.tryread(b'sparse')).digest())
         cache[b'signature'] = signature
 
         if includetemp:
             raw = repo.vfs.tryread(b'tempsparse')
-            tempsignature = hex(hashlib.sha1(raw).digest())
+            tempsignature = hex(hashutil.sha1(raw).digest())
             cache[b'tempsignature'] = tempsignature
 
     return b'%s %s' % (signature, tempsignature)
@@ -9,7 +9,6 @@ from __future__ import absolute_import
 
 import errno
 import functools
-import hashlib
 import os
 import stat
 
@@ -25,6 +24,7 @@ from . import (
     util,
     vfs as vfsmod,
 )
+from .utils import hashutil
 
 parsers = policy.importmod('parsers')
 # how much bytes should be read from fncache in one read
@@ -273,7 +273,7 @@ def _auxencode(path, dotencode):
 
 
 def _hashencode(path, dotencode):
-    digest = node.hex(hashlib.sha1(path).digest())
+    digest = node.hex(hashutil.sha1(path).digest())
     le = lowerencode(path[5:]).split(b'/')  # skips prefix 'data/' or 'meta/'
     parts = _auxencode(le, dotencode)
     basename = parts[-1]
@@ -9,7 +9,6 @@ from __future__ import absolute_import
 
 import copy
 import errno
-import hashlib
 import os
 import re
 import stat
@@ -37,6 +36,7 @@ from . import (
 )
 from .utils import (
     dateutil,
+    hashutil,
     procutil,
     stringutil,
 )
@@ -61,7 +61,7 @@ def _expandedabspath(path):
 
 def _getstorehashcachename(remotepath):
     '''get a unique filename for the store hash cache of a remote repository'''
-    return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
+    return node.hex(hashutil.sha1(_expandedabspath(remotepath)).digest())[0:12]
 
 
 class SubrepoAbort(error.Abort):
@@ -514,7 +514,7 @@ class hgsubrepo(abstractsubrepo):
         yield b'# %s\n' % _expandedabspath(remotepath)
         vfs = self._repo.vfs
         for relname in filelist:
-            filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
+            filehash = node.hex(hashutil.sha1(vfs.tryread(relname)).digest())
             yield b'%s = %s\n' % (relname, filehash)
 
     @propertycache
@@ -53,6 +53,7 @@ from . import (
 )
 from .utils import (
     compression,
+    hashutil,
     procutil,
     stringutil,
 )
@@ -197,7 +198,7 @@ def nouideprecwarn(msg, version, stackle
 
 DIGESTS = {
     b'md5': hashlib.md5,
-    b'sha1': hashlib.sha1,
+    b'sha1': hashutil.sha1,
     b'sha512': hashlib.sha512,
 }
 # List of digest types from strongest to weakest
@@ -7,7 +7,6 @@
 
 from __future__ import absolute_import
 
-import hashlib
 import re
 import struct
 
@@ -24,8 +23,9 @@ from .. import (
     pycompat,
 )
 from ..interfaces import repository
+from ..utils import hashutil
 
-_nullhash = hashlib.sha1(nullid)
+_nullhash = hashutil.sha1(nullid)
 
 
 def hashrevisionsha1(text, p1, p2):
@@ -48,7 +48,7 @@ def hashrevisionsha1(text, p1, p2):
     else:
         a = p2
         b = p1
-    s = hashlib.sha1(a)
+    s = hashutil.sha1(a)
     s.update(b)
     s.update(text)
     return s.digest()
@@ -7,7 +7,6 @@
 
 from __future__ import absolute_import
 
-import hashlib
 import sys
 import weakref
 
@@ -31,6 +30,7 @@ from .interfaces import (
     repository,
     util as interfaceutil,
 )
+from .utils import hashutil
 
 urlreq = util.urlreq
 
@@ -489,7 +489,7 @@ class wirepeer(repository.peer):
 
         if heads != [b'force'] and self.capable(b'unbundlehash'):
             heads = wireprototypes.encodelist(
-                [b'hashed', hashlib.sha1(b''.join(sorted(heads))).digest()]
+                [b'hashed', hashutil.sha1(b''.join(sorted(heads))).digest()]
             )
         else:
             heads = wireprototypes.encodelist(heads)
@@ -8,7 +8,6 @@ from __future__ import absolute_import
 
 import collections
 import contextlib
-import hashlib
 
 from .i18n import _
 from .node import (
@@ -31,6 +30,7 @@ from . import (
 from .interfaces import util as interfaceutil
 from .utils import (
     cborutil,
+    hashutil,
     stringutil,
 )
 
@@ -858,7 +858,7 @@ def makecommandcachekeyfn(command, local
 
         cacher.adjustcachekeystate(state)
 
-        hasher = hashlib.sha1()
+        hasher = hashutil.sha1()
         for chunk in cborutil.streamencode(state):
             hasher.update(chunk)
 
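After the migration, a call site obtains its hasher from the wrapper rather than from hashlib directly. A tiny standalone usage sketch (assuming a Mercurial install that provides mercurial.utils.hashutil on the import path):

from mercurial.utils import hashutil

# hashutil.sha1 behaves like hashlib.sha1: it returns a hasher object
# supporting update(), digest() and hexdigest().
h = hashutil.sha1(b'blob 11\0')
h.update(b'hello world')
print(h.hexdigest())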