@@ -41,7 +41,6 b' Config'
 
 from __future__ import absolute_import
 
-import hashlib
 import inspect
 import os
 import re
@@ -67,6 +66,7 b' from . import ('
 )
 
 from .utils import (
+    hashutil,
     procutil,
     stringutil,
 )
@@ -74,7 +74,7 b' from .utils import ('
 
 def _hashlist(items):
     """return sha1 hexdigest for a list"""
-    return node.hex(hashlib.sha1(stringutil.pprint(items)).digest())
+    return node.hex(hashutil.sha1(stringutil.pprint(items)).digest())
 
 
 # sensitive config sections affecting confighash
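
Every hunk in this review follows the same mechanical pattern: drop the module-level `import hashlib`, import `hashutil` from the utils package instead, and rewrite `hashlib.sha1(...)` call sites as `hashutil.sha1(...)`. The wrapper module itself (presumably `mercurial/utils/hashutil.py`, given the relative `.utils` imports) is not shown in this excerpt. Below is a minimal sketch of what such a module might look like, assuming its only job is to expose a `hashlib`-compatible `sha1` constructor; the collision-detecting fallback named here is an illustration, not something the diff confirms.

    # Hypothetical stand-in for the hashutil module imported above; nothing
    # below is taken from the diff itself.
    from __future__ import absolute_import

    import hashlib

    try:
        # Assumption: prefer a collision-detecting SHA-1 package when one is
        # importable; the package and attribute names are placeholders.
        import sha1dc

        sha1 = sha1dc.sha1
    except (ImportError, AttributeError):
        # Otherwise fall back to the stdlib constructor with the same interface.
        sha1 = hashlib.sha1

Because the wrapper keeps the `hashlib.sha1` interface (`update()`, `digest()`, `hexdigest()`), every call site in this review changes only its import path.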
@@ -8,7 +8,6 b''
 from __future__ import absolute_import
 
 import collections
-import hashlib
 
 from .i18n import _
 from .node import (
@@ -40,7 +39,10 b' from . import ('
     wireprototypes,
 )
 from .interfaces import repository
-from .utils import stringutil
+from .utils import (
+    hashutil,
+    stringutil,
+)
 
 urlerr = util.urlerr
 urlreq = util.urlreq
@@ -2705,7 +2707,7 b' def check_heads(repo, their_heads, conte'
     Used by peer for unbundling.
     """
     heads = repo.heads()
-    heads_hash = hashlib.sha1(b''.join(sorted(heads))).digest()
+    heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
     if not (
         their_heads == [b'force']
         or their_heads == heads
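
For context on the `check_heads` hunk: the server recomputes a SHA-1 over its sorted binary head nodes and compares it against what the pushing peer sent (the matching client-side computation appears in the `wirepeer` hunk near the end of this review). A self-contained illustration of that check with fabricated 20-byte node ids; only the `sha1(b''.join(sorted(heads)))` expression comes from the diff.

    import hashlib  # the change under review swaps this for hashutil, same interface

    def heads_hash(heads):
        # Hash the concatenation of the sorted binary head nodes.
        return hashlib.sha1(b''.join(sorted(heads))).digest()

    # Two made-up 20-byte node ids; sorting makes the result order-independent.
    server_heads = [b'\x01' * 20, b'\x02' * 20]
    client_heads = [b'\x02' * 20, b'\x01' * 20]
    assert heads_hash(server_heads) == heads_hash(client_heads)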
@@ -9,7 +9,6 b''
 from __future__ import absolute_import
 
 import errno
-import hashlib
 import os
 import shutil
 import stat
@@ -48,7 +47,7 b' from . import ('
     verify as verifymod,
     vfs as vfsmod,
 )
-
+from .utils import hashutil
 from .interfaces import repository as repositorymod
 
 release = lock.release
@@ -738,7 +737,7 b' def clone('
             )
         elif sharenamemode == b'remote':
             sharepath = os.path.join(
-                sharepool, node.hex(hashlib.sha1(source).digest())
+                sharepool, node.hex(hashutil.sha1(source).digest())
             )
         else:
             raise error.Abort(
@@ -8,7 +8,6 b''
 from __future__ import absolute_import
 
 import errno
-import hashlib
 import os
 import random
 import sys
@@ -74,6 +73,7 b' from .interfaces import ('
 )
 
 from .utils import (
+    hashutil,
     procutil,
     stringutil,
 )
@@ -2007,7 +2007,7 b' class localrepository(object):'
             )
 
         idbase = b"%.40f#%f" % (random.random(), time.time())
-        ha = hex(hashlib.sha1(idbase).digest())
+        ha = hex(hashutil.sha1(idbase).digest())
         txnid = b'TXN:' + ha
         self.hook(b'pretxnopen', throw=True, txnname=desc, txnid=txnid)
 
@@ -8,7 +8,6 b''
 from __future__ import absolute_import
 
 import errno
-import hashlib
 import shutil
 import stat
 import struct
@@ -39,6 +38,7 b' from . import ('
     util,
     worker,
 )
+from .utils import hashutil
 
 _pack = struct.pack
 _unpack = struct.unpack
@@ -512,7 +512,7 b' class mergestate(object):'
         """hash the path of a local file context for storage in the .hg/merge
         directory."""
 
-        return hex(hashlib.sha1(path).digest())
+        return hex(hashutil.sha1(path).digest())
 
     def add(self, fcl, fco, fca, fd):
         """add a new (potentially?) conflicting file the merge state
@@ -70,7 +70,6 b' comment associated with each format for '
 from __future__ import absolute_import
 
 import errno
-import hashlib
 import struct
 
 from .i18n import _
@@ -85,7 +84,10 b' from . import ('
     pycompat,
     util,
 )
-from .utils import dateutil
+from .utils import (
+    dateutil,
+    hashutil,
+)
 
 parsers = policy.importmod('parsers')
 
@@ -1028,7 +1030,7 b' def _computecontentdivergentset(repo):'
 
 def makefoldid(relation, user):
 
-    folddigest = hashlib.sha1(user)
+    folddigest = hashutil.sha1(user)
     for p in relation[0] + relation[1]:
         folddigest.update(b'%d' % p.rev())
         folddigest.update(p.node())
@@ -12,7 +12,6 b' import collections'
 import contextlib
 import copy
 import errno
-import hashlib
 import os
 import re
 import shutil
@@ -41,6 +40,7 b' from . import ('
 )
 from .utils import (
     dateutil,
+    hashutil,
     procutil,
     stringutil,
 )
@@ -2943,7 +2943,7 b' def trydiff('
         if not text:
             text = b""
         l = len(text)
-        s = hashlib.sha1(b'blob %d\0' % l)
+        s = hashutil.sha1(b'blob %d\0' % l)
         s.update(text)
         return hex(s.digest())
 
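
The `trydiff` hunk above computes a git-style blob id: a SHA-1 over a `blob <length>\0` header followed by the content. A stand-alone check of that construction; the helper name is made up, and the expected digest is the well-known value `git hash-object` reports for the same content.

    import hashlib

    def git_blob_id(text):
        # SHA-1 of "blob <len>\0" followed by the content, as in the hunk above.
        s = hashlib.sha1(b'blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    assert git_blob_id(b'hello\n') == 'ce013625030ba8dba906f756967f9e9ca394464a'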
@@ -9,7 +9,6 b''
 from __future__ import absolute_import
 
 import errno
-import hashlib
 
 from .i18n import _
 from .node import (
@@ -29,7 +28,10 b' from . import ('
     pycompat,
     util,
 )
-from .utils import stringutil
+from .utils import (
+    hashutil,
+    stringutil,
+)
 
 
 def backupbundle(
@@ -45,7 +47,7 b' def backupbundle('
     # Include a hash of all the nodes in the filename for uniqueness
     allcommits = repo.set(b'%ln::%ln', bases, heads)
     allhashes = sorted(c.hex() for c in allcommits)
-    totalhash = hashlib.sha1(b''.join(allhashes)).digest()
+    totalhash = hashutil.sha1(b''.join(allhashes)).digest()
     name = b"%s/%s-%s-%s.hg" % (
         backupdir,
         short(node),
@@ -33,10 +33,10 b' the concept.'
 
 from __future__ import absolute_import
 
-import hashlib
 import struct
 
 from .. import error
+from ..utils import hashutil
 
 ## sidedata type constant
 # reserve a block for testing purposes.
@@ -64,7 +64,7 b' def sidedatawriteprocessor(rl, text, sid'
     sidedata.sort()
     rawtext = [SIDEDATA_HEADER.pack(len(sidedata))]
     for key, value in sidedata:
-        digest = hashlib.sha1(value).digest()
+        digest = hashutil.sha1(value).digest()
         rawtext.append(SIDEDATA_ENTRY.pack(key, len(value), digest))
     for key, value in sidedata:
         rawtext.append(value)
@@ -85,7 +85,7 b' def sidedatareadprocessor(rl, text):'
         # read the data associated with that entry
         nextdataoffset = dataoffset + size
         entrytext = text[dataoffset:nextdataoffset]
-        readdigest = hashlib.sha1(entrytext).digest()
+        readdigest = hashutil.sha1(entrytext).digest()
         if storeddigest != readdigest:
             raise error.SidedataHashError(key, storeddigest, readdigest)
         sidedata[key] = entrytext
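
The two sidedata hunks store a SHA-1 of each value next to the value on write and verify it on read, raising `SidedataHashError` on a mismatch. A rough round-trip sketch of that idea; the struct layout and helper names are assumptions, only the digest-per-value scheme comes from the diff.

    import hashlib
    import struct

    # Assumed entry layout: 2-byte key, 4-byte value length, 20-byte SHA-1.
    ENTRY = struct.Struct('>HL20s')

    def pack_entry(key, value):
        # Prefix the value with its key, length and digest.
        return ENTRY.pack(key, len(value), hashlib.sha1(value).digest()) + value

    def unpack_entry(blob):
        # Recompute the digest on read and reject corrupted entries.
        key, size, stored = ENTRY.unpack_from(blob)
        value = blob[ENTRY.size:ENTRY.size + size]
        if hashlib.sha1(value).digest() != stored:
            raise ValueError('sidedata digest mismatch for key %d' % key)
        return key, value

    assert unpack_entry(pack_entry(7, b'example value')) == (7, b'example value')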
@@ -9,7 +9,6 b' from __future__ import absolute_import'
 
 import errno
 import glob
-import hashlib
 import os
 import posixpath
 import re
@@ -48,6 +47,7 b' from . import ('
 )
 
 from .utils import (
+    hashutil,
     procutil,
     stringutil,
 )
@@ -366,7 +366,7 b' def filteredhash(repo, maxrev):'
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
     if revs:
-        s = hashlib.sha1()
+        s = hashutil.sha1()
         for rev in revs:
             s.update(b'%d;' % rev)
         key = s.digest()
@@ -7,7 +7,6 b''
 
 from __future__ import absolute_import
 
-import hashlib
 import os
 
 from .i18n import _
@@ -24,6 +23,7 b' from . import ('
     scmutil,
     util,
 )
+from .utils import hashutil
 
 # Whether sparse features are enabled. This variable is intended to be
 # temporary to facilitate porting sparse to core. It should eventually be
@@ -205,12 +205,12 b' def configsignature(repo, includetemp=Tr'
         tempsignature = b'0'
 
     if signature is None or (includetemp and tempsignature is None):
-        signature = hex(hashlib.sha1(repo.vfs.tryread(b'sparse')).digest())
+        signature = hex(hashutil.sha1(repo.vfs.tryread(b'sparse')).digest())
         cache[b'signature'] = signature
 
         if includetemp:
             raw = repo.vfs.tryread(b'tempsparse')
-            tempsignature = hex(hashlib.sha1(raw).digest())
+            tempsignature = hex(hashutil.sha1(raw).digest())
             cache[b'tempsignature'] = tempsignature
 
     return b'%s %s' % (signature, tempsignature)
@@ -9,7 +9,6 b' from __future__ import absolute_import'
 
 import errno
 import functools
-import hashlib
 import os
 import stat
 
@@ -25,6 +24,7 b' from . import ('
     util,
     vfs as vfsmod,
 )
+from .utils import hashutil
 
 parsers = policy.importmod('parsers')
 # how much bytes should be read from fncache in one read
@@ -273,7 +273,7 b' def _auxencode(path, dotencode):'
 
 
 def _hashencode(path, dotencode):
-    digest = node.hex(hashlib.sha1(path).digest())
+    digest = node.hex(hashutil.sha1(path).digest())
     le = lowerencode(path[5:]).split(b'/')  # skips prefix 'data/' or 'meta/'
     parts = _auxencode(le, dotencode)
     basename = parts[-1]
@@ -9,7 +9,6 b' from __future__ import absolute_import'
 
 import copy
 import errno
-import hashlib
 import os
 import re
 import stat
@@ -37,6 +36,7 b' from . import ('
 )
 from .utils import (
     dateutil,
+    hashutil,
     procutil,
     stringutil,
 )
@@ -61,7 +61,7 b' def _expandedabspath(path):'
 
 def _getstorehashcachename(remotepath):
     '''get a unique filename for the store hash cache of a remote repository'''
-    return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
+    return node.hex(hashutil.sha1(_expandedabspath(remotepath)).digest())[0:12]
 
 
 class SubrepoAbort(error.Abort):
@@ -514,7 +514,7 b' class hgsubrepo(abstractsubrepo):'
         yield b'# %s\n' % _expandedabspath(remotepath)
         vfs = self._repo.vfs
         for relname in filelist:
-            filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
+            filehash = node.hex(hashutil.sha1(vfs.tryread(relname)).digest())
             yield b'%s = %s\n' % (relname, filehash)
 
     @propertycache
@@ -53,6 +53,7 b' from . import ('
 )
 from .utils import (
     compression,
+    hashutil,
     procutil,
     stringutil,
 )
@@ -197,7 +198,7 b' def nouideprecwarn(msg, version, stackle'
 
 DIGESTS = {
     b'md5': hashlib.md5,
-    b'sha1': hashlib.sha1,
+    b'sha1': hashutil.sha1,
     b'sha512': hashlib.sha512,
 }
 # List of digest types from strongest to weakest
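
The `DIGESTS` table above maps algorithm names to hash constructors, so this hunk only repoints the `b'sha1'` entry at the wrapper while callers keep selecting digests by name. A usage sketch; the lookup helper is illustrative and not the real API built on this table.

    import hashlib

    # Stand-in for the table in the hunk above; after the change the b'sha1'
    # entry would reference hashutil.sha1 instead.
    DIGESTS = {
        b'md5': hashlib.md5,
        b'sha1': hashlib.sha1,
        b'sha512': hashlib.sha512,
    }

    def hexdigest(name, data):
        # Look up the constructor by name and hash the data in one shot.
        h = DIGESTS[name]()
        h.update(data)
        return h.hexdigest()

    assert hexdigest(b'sha1', b'hello') == 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'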
@@ -7,7 +7,6 b''
 
 from __future__ import absolute_import
 
-import hashlib
 import re
 import struct
 
@@ -24,8 +23,9 b' from .. import ('
     pycompat,
 )
 from ..interfaces import repository
+from ..utils import hashutil
 
-_nullhash = hashlib.sha1(nullid)
+_nullhash = hashutil.sha1(nullid)
 
 
 def hashrevisionsha1(text, p1, p2):
@@ -48,7 +48,7 b' def hashrevisionsha1(text, p1, p2):'
     else:
         a = p2
         b = p1
-    s = hashlib.sha1(a)
+    s = hashutil.sha1(a)
     s.update(b)
     s.update(text)
     return s.digest()
@@ -7,7 +7,6 b''
 
 from __future__ import absolute_import
 
-import hashlib
 import sys
 import weakref
 
@@ -31,6 +30,7 b' from .interfaces import ('
     repository,
     util as interfaceutil,
 )
+from .utils import hashutil
 
 urlreq = util.urlreq
 
@@ -489,7 +489,7 b' class wirepeer(repository.peer):'
 
         if heads != [b'force'] and self.capable(b'unbundlehash'):
             heads = wireprototypes.encodelist(
-                [b'hashed', hashlib.sha1(b''.join(sorted(heads))).digest()]
+                [b'hashed', hashutil.sha1(b''.join(sorted(heads))).digest()]
             )
         else:
             heads = wireprototypes.encodelist(heads)
@@ -8,7 +8,6 b' from __future__ import absolute_import'
 
 import collections
 import contextlib
-import hashlib
 
 from .i18n import _
 from .node import (
@@ -31,6 +30,7 b' from . import ('
 from .interfaces import util as interfaceutil
 from .utils import (
     cborutil,
+    hashutil,
     stringutil,
 )
 
@@ -858,7 +858,7 b' def makecommandcachekeyfn(command, local'
 
         cacher.adjustcachekeystate(state)
 
-        hasher = hashlib.sha1()
+        hasher = hashutil.sha1()
         for chunk in cborutil.streamencode(state):
             hasher.update(chunk)
 