@@ -38,7 +38,6 @@ import collections
 from mercurial.i18n import _
 from mercurial.node import (
     hex,
-    nullid,
     short,
 )
 from mercurial import (
@@ -109,7 +108,7 @@ class emptyfilecontext(object):
         return b''
 
     def node(self):
-        return nullid
+        return self._repo.nullid
 
 
 def uniq(lst):
@@ -927,7 +926,7 @@ class fixupstate(object):
         the commit is a clone from ctx, with a (optionally) different p1, and
         different file contents replaced by memworkingcopy.
         """
-        parents = p1 and (p1, nullid)
+        parents = p1 and (p1, self.repo.nullid)
         extra = ctx.extra()
         if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'):
            extra[b'absorb_source'] = ctx.hex()
@@ -9,7 +9,7 @@ from __future__ import absolute_import
 import os
 
 from mercurial.i18n import _
-from mercurial.node import nullhex
+from mercurial.node import sha1nodeconstants
 from mercurial import (
     config,
     error,
@@ -192,7 +192,7 @@ class convert_git(common.converter_sourc
         return heads
 
     def catfile(self, rev, ftype):
-        if rev == nullhex:
+        if rev == sha1nodeconstants.nullhex:
             raise IOError
         self.catfilepipe[0].write(rev + b'\n')
         self.catfilepipe[0].flush()
@@ -214,7 +214,7 @@ class convert_git(common.converter_sourc
         return data
 
     def getfile(self, name, rev):
-        if rev == nullhex:
+        if rev == sha1nodeconstants.nullhex:
             return None, None
         if name == b'.hgsub':
             data = b'\n'.join([m.hgsub() for m in self.submoditer()])
@@ -228,7 +228,7 @@ class convert_git(common.converter_sourc
         return data, mode
 
     def submoditer(self):
-        null = nullhex
+        null = sha1nodeconstants.nullhex
         for m in sorted(self.submodules, key=lambda p: p.path):
             if m.node != null:
                 yield m
@@ -317,7 +317,7 @@ class convert_git(common.converter_sourc
                 subexists[0] = True
                 if entry[4] == b'D' or renamesource:
                     subdeleted[0] = True
-                    changes.append((b'.hgsub', nullhex))
+                    changes.append((b'.hgsub', sha1nodeconstants.nullhex))
                 else:
                     changes.append((b'.hgsub', b''))
             elif entry[1] == b'160000' or entry[0] == b':160000':
@@ -325,7 +325,7 @@ class convert_git(common.converter_sourc
                 subexists[0] = True
             else:
                 if renamesource:
-                    h = nullhex
+                    h = sha1nodeconstants.nullhex
                 self.modecache[(f, h)] = (p and b"x") or (s and b"l") or b""
                 changes.append((f, h))
 
@@ -362,7 +362,7 @@ class convert_git(common.converter_sourc
 
         if subexists[0]:
             if subdeleted[0]:
-                changes.append((b'.hgsubstate', nullhex))
+                changes.append((b'.hgsubstate', sha1nodeconstants.nullhex))
             else:
                 self.retrievegitmodules(version)
                 changes.append((b'.hgsubstate', b''))
@@ -27,8 +27,7 @@ from mercurial.pycompat import open
 from mercurial.node import (
     bin,
     hex,
-    nullhex,
-    nullid,
+    sha1nodeconstants,
 )
 from mercurial import (
     bookmarks,
@@ -160,7 +159,7 @@ class mercurial_sink(common.converter_si
                 continue
             revid = revmap.get(source.lookuprev(s[0]))
             if not revid:
-                if s[0] == nullhex:
+                if s[0] == sha1nodeconstants.nullhex:
                     revid = s[0]
                 else:
                     # missing, but keep for hash stability
@@ -179,7 +178,7 @@ class mercurial_sink(common.converter_si
 
             revid = s[0]
             subpath = s[1]
-            if revid != nullhex:
+            if revid != sha1nodeconstants.nullhex:
                 revmap = self.subrevmaps.get(subpath)
                 if revmap is None:
                     revmap = mapfile(
@@ -304,9 +303,9 @@ class mercurial_sink(common.converter_si
             parent = parents[0]
 
         if len(parents) < 2:
-            parents.append(nullid)
+            parents.append(self.repo.nullid)
         if len(parents) < 2:
-            parents.append(nullid)
+            parents.append(self.repo.nullid)
         p2 = parents.pop(0)
 
         text = commit.desc
@@ -356,7 +355,7 @@ class mercurial_sink(common.converter_si
             p2 = parents.pop(0)
             p1ctx = self.repo[p1]
             p2ctx = None
-            if p2 != nullid:
+            if p2 != self.repo.nullid:
                 p2ctx = self.repo[p2]
             fileset = set(files)
             if full:
@@ -421,7 +420,7 @@ class mercurial_sink(common.converter_si
 
     def puttags(self, tags):
         tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
-        tagparent = tagparent or nullid
+        tagparent = tagparent or self.repo.nullid
 
         oldlines = set()
         for branch, heads in pycompat.iteritems(self.repo.branchmap()):
@@ -4,7 +4,7 @@ import contextlib
 import errno
 import os
 
-from mercurial.node import nullid
+from mercurial.node import sha1nodeconstants
 from mercurial import (
     error,
     extensions,
@@ -81,14 +81,16 @@ class gitdirstate(object):
         except pygit2.GitError:
             # Typically happens when peeling HEAD fails, as in an
             # empty repository.
-            return nullid
+            return sha1nodeconstants.nullid
 
     def p2(self):
         # TODO: MERGE_HEAD? something like that, right?
-        return nullid
+        return sha1nodeconstants.nullid
 
-    def setparents(self, p1, p2=nullid):
-        assert p2 == nullid, b'TODO merging support'
+    def setparents(self, p1, p2=None):
+        if p2 is None:
+            p2 = sha1nodeconstants.nullid
+        assert p2 == sha1nodeconstants.nullid, b'TODO merging support'
         self.git.head.set_target(gitutil.togitnode(p1))
 
     @util.propertycache
@@ -102,7 +104,7 @@ class gitdirstate(object):
 
     def parents(self):
         # TODO how on earth do we find p2 if a merge is in flight?
-        return self.p1(), nullid
+        return self.p1(), sha1nodeconstants.nullid
 
     def __iter__(self):
         return (pycompat.fsencode(f.path) for f in self.git.index)
@@ -5,11 +5,8 @@ from mercurial.i18n import _
 from mercurial.node import (
     bin,
     hex,
-    nullhex,
-    nullid,
     nullrev,
     sha1nodeconstants,
-    wdirhex,
 )
 from mercurial import (
     ancestor,
@@ -47,7 +44,7 @@ class baselog(object):  # revlog.revlog)
         )
 
     def rev(self, n):
-        if n == nullid:
+        if n == sha1nodeconstants.nullid:
             return -1
         t = self._db.execute(
             'SELECT rev FROM changelog WHERE node = ?', (gitutil.togitnode(n),)
@@ -58,7 +55,7 @@ class baselog(object):  # revlog.revlog)
 
     def node(self, r):
         if r == nullrev:
-            return nullid
+            return sha1nodeconstants.nullid
         t = self._db.execute(
             'SELECT node FROM changelog WHERE rev = ?', (r,)
         ).fetchone()
@@ -134,7 +131,7 @@ class changelog(baselog):
             bin(v[0]): v[1]
             for v in self._db.execute('SELECT node, rev FROM changelog')
         }
-        r[nullid] = nullrev
+        r[sha1nodeconstants.nullid] = nullrev
         return r
 
     def tip(self):
@@ -143,7 +140,7 @@ class changelog(baselog):
         ).fetchone()
         if t:
             return bin(t[0])
-        return nullid
+        return sha1nodeconstants.nullid
 
     def revs(self, start=0, stop=None):
         if stop is None:
@@ -163,7 +160,7 @@ class changelog(baselog):
         return next(t)
 
     def _partialmatch(self, id):
-        if wdirhex.startswith(id):
+        if sha1nodeconstants.wdirhex.startswith(id):
             raise error.WdirUnsupported
         candidates = [
             bin(x[0])
@@ -171,8 +168,8 @@ class changelog(baselog):
                 'SELECT node FROM changelog WHERE node LIKE ?', (id + b'%',)
             )
         ]
-        if nullhex.startswith(id):
-            candidates.append(nullid)
+        if sha1nodeconstants.nullhex.startswith(id):
+            candidates.append(sha1nodeconstants.nullid)
         if len(candidates) > 1:
             raise error.AmbiguousPrefixLookupError(
                 id, b'00changelog.i', _(b'ambiguous identifier')
@@ -217,8 +214,10 @@ class changelog(baselog):
         else:
             n = nodeorrev
         # handle looking up nullid
-        if n == nullid:
-            return hgchangelog._changelogrevision(extra={}, manifest=nullid)
+        if n == sha1nodeconstants.nullid:
+            return hgchangelog._changelogrevision(
+                extra={}, manifest=sha1nodeconstants.nullid
+            )
         hn = gitutil.togitnode(n)
         # We've got a real commit!
         files = [
@@ -234,7 +233,7 @@ class changelog(baselog):
             for r in self._db.execute(
                 'SELECT filename FROM changedfiles '
                 'WHERE node = ? and filenode = ?',
-                (hn, nullhex),
+                (hn, sha1nodeconstants.nullhex),
             )
         ]
         c = self.gitrepo[hn]
@@ -295,7 +294,7 @@ class changelog(baselog):
         not supplied, uses all of the revlog's heads. If common is not
         supplied, uses nullid."""
         if common is None:
-            common = [nullid]
+            common = [sha1nodeconstants.nullid]
         if heads is None:
             heads = self.heads()
 
@@ -394,9 +393,9 @@ class changelog(baselog):
     ):
         parents = []
         hp1, hp2 = gitutil.togitnode(p1), gitutil.togitnode(p2)
-        if p1 != nullid:
+        if p1 != sha1nodeconstants.nullid:
             parents.append(hp1)
-        if p2 and p2 != nullid:
+        if p2 and p2 != sha1nodeconstants.nullid:
             parents.append(hp2)
         assert date is not None
         timestamp, tz = date
@@ -429,7 +428,7 @@ class manifestlog(baselog):
         return self.get(b'', node)
 
     def get(self, relpath, node):
-        if node == nullid:
+        if node == sha1nodeconstants.nullid:
             # TODO: this should almost certainly be a memgittreemanifestctx
             return manifest.memtreemanifestctx(self, relpath)
         commit = self.gitrepo[gitutil.togitnode(node)]
@@ -448,9 +447,10 @@ class filelog(baselog):
         super(filelog, self).__init__(gr, db)
         assert isinstance(path, bytes)
         self.path = path
+        self.nullid = sha1nodeconstants.nullid
 
     def read(self, node):
-        if node == nullid:
+        if node == sha1nodeconstants.nullid:
             return b''
         return self.gitrepo[gitutil.togitnode(node)].data
 
@@ -1,7 +1,7 @@
 """utilities to assist in working with pygit2"""
 from __future__ import absolute_import
 
-from mercurial.node import bin, hex, nullid
+from mercurial.node import bin, hex, sha1nodeconstants
 
 from mercurial import pycompat
 
@@ -50,4 +50,4 @@ def fromgitnode(n):
     return bin(n)
 
 
-nullgit = togitnode(nullid)
+nullgit = togitnode(sha1nodeconstants.nullid)
@@ -5,10 +5,7 @@ import os
 import sqlite3
 
 from mercurial.i18n import _
-from mercurial.node import (
-    nullhex,
-    nullid,
-)
+from mercurial.node import sha1nodeconstants
 
 from mercurial import (
     encoding,
@@ -281,7 +278,7 @@ def _index_repo(
     for pos, commit in enumerate(walker):
         if prog is not None:
             prog.update(pos)
-        p1 = p2 = nullhex
+        p1 = p2 = sha1nodeconstants.nullhex
         if len(commit.parents) > 2:
             raise error.ProgrammingError(
                 (
@@ -318,7 +315,9 @@ def _index_repo(
             )
        new_files = (p.delta.new_file for p in patchgen)
        files = {
-            nf.path: nf.id.hex for nf in new_files if nf.id.raw != nullid
+            nf.path: nf.id.hex
+            for nf in new_files
+            if nf.id.raw != sha1nodeconstants.nullid
        }
        for p, n in files.items():
            # We intentionally set NULLs for any file parentage
@@ -14,7 +14,6 @@ from mercurial.i18n import _
 from mercurial.node import (
     bin,
     hex,
-    nullid,
     short,
 )
 from mercurial import (
@@ -314,7 +313,9 @@ def _dosign(ui, repo, *revs, **opts):
     if revs:
         nodes = [repo.lookup(n) for n in revs]
     else:
-        nodes = [node for node in repo.dirstate.parents() if node != nullid]
+        nodes = [
+            node for node in repo.dirstate.parents() if node != repo.nullid
+        ]
     if len(nodes) > 1:
         raise error.Abort(
             _(b'uncommitted merge - please provide a specific revision')
@@ -40,7 +40,6 @@ import os
 
 from mercurial.i18n import _
 from mercurial.node import (
-    nullid,
     nullrev,
     short,
 )
@@ -95,7 +94,7 @@ def difftree(ui, repo, node1=None, node2
     mmap2 = repo[node2].manifest()
     m = scmutil.match(repo[node1], files)
     st = repo.status(node1, node2, m)
-    empty = short(nullid)
+    empty = short(repo.nullid)
 
     for f in st.modified:
         # TODO get file permissions
@@ -317,9 +316,9 @@ def revtree(ui, args, repo, full=b"tree"
         parentstr = b""
         if parents:
             pp = repo.changelog.parents(n)
-            if pp[0] != nullid:
+            if pp[0] != repo.nullid:
                 parentstr += b" " + short(pp[0])
-            if pp[1] != nullid:
+            if pp[1] != repo.nullid:
                 parentstr += b" " + short(pp[1])
         if not full:
             ui.write(b"%s%s\n" % (short(n), parentstr))
@@ -22,7 +22,6 @@ from mercurial.i18n import _
 from mercurial.node import (
     bin,
     hex,
-    nullid,
 )
 
 from mercurial import (
@@ -117,8 +116,8 @@ def recorddirstateparents(dirstate, old,
     new = list(new)
     if util.safehasattr(dirstate, 'journalstorage'):
         # only record two hashes if there was a merge
-        oldhashes = old[:1] if old[1] == nullid else old
-        newhashes = new[:1] if new[1] == nullid else new
+        oldhashes = old[:1] if old[1] == dirstate._nodeconstants.nullid else old
+        newhashes = new[:1] if new[1] == dirstate._nodeconstants.nullid else new
         dirstate.journalstorage.record(
             wdirparenttype, b'.', oldhashes, newhashes
         )
@@ -131,7 +130,7 @@ def recordbookmarks(orig, store, fp):
     if util.safehasattr(repo, 'journal'):
         oldmarks = bookmarks.bmstore(repo)
         for mark, value in pycompat.iteritems(store):
-            oldvalue = oldmarks.get(mark, nullid)
+            oldvalue = oldmarks.get(mark, repo.nullid)
             if value != oldvalue:
                 repo.journal.record(bookmarktype, mark, oldvalue, value)
     return orig(store, fp)
@@ -11,7 +11,8 @@ from __future__ import absolute_import
 
 from mercurial.i18n import _
 
-from mercurial import node, util
+from mercurial.node import short
+from mercurial import util
 from mercurial.utils import (
     urlutil,
 )
@@ -137,7 +138,7 @@ class basestore(object):
         filestocheck = []  # list of (cset, filename, expectedhash)
         for rev in revs:
             cctx = self.repo[rev]
-            cset = b"%d:%s" % (cctx.rev(), node.short(cctx.node()))
+            cset = b"%d:%s" % (cctx.rev(), short(cctx.node()))
 
             for standin in cctx:
                 filename = lfutil.splitstandin(standin)
@@ -17,7 +17,6 @@ from mercurial.i18n import _
 from mercurial.node import (
     bin,
     hex,
-    nullid,
 )
 
 from mercurial import (
@@ -115,7 +114,7 @@ def lfconvert(ui, src, dest, *pats, **op
             rsrc[ctx]
             for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0]
         )
-        revmap = {nullid: nullid}
+        revmap = {rsrc.nullid: rdst.nullid}
         if tolfile:
             # Lock destination to prevent modification while it is converted to.
             # Don't need to lock src because we are just reading from its
@@ -340,7 +339,7 @@ def _commitcontext(rdst, parents, ctx, d
 # Generate list of changed files
 def _getchangedfiles(ctx, parents):
     files = set(ctx.files())
-    if nullid not in parents:
+    if ctx.repo().nullid not in parents:
         mc = ctx.manifest()
         for pctx in ctx.parents():
             for fn in pctx.manifest().diff(mc):
@@ -354,7 +353,7 @@ def _convertparents(ctx, revmap):
     for p in ctx.parents():
         parents.append(revmap[p.node()])
     while len(parents) < 2:
-        parents.append(nullid)
+        parents.append(ctx.repo().nullid)
     return parents
 
 
@@ -15,10 +15,7 @@ import os
 import stat
 
 from mercurial.i18n import _
-from mercurial.node import (
-    hex,
-    nullid,
-)
+from mercurial.node import hex
 from mercurial.pycompat import open
 
 from mercurial import (
@@ -613,7 +610,7 @@ def getlfilestoupload(repo, missing, add
     ) as progress:
         for i, n in enumerate(missing):
             progress.update(i)
-            parents = [p for p in repo[n].parents() if p != nullid]
+            parents = [p for p in repo[n].parents() if p != repo.nullid]
 
             with lfstatus(repo, value=False):
                 ctx = repo[n]
@@ -10,7 +10,7 @@ from __future__ import absolute_import
 import hashlib
 
 from mercurial.i18n import _
-from mercurial.node import bin, hex, nullid, short
+from mercurial.node import bin, hex, short
 from mercurial.pycompat import (
     getattr,
     setattr,
@@ -158,7 +158,7 @@ def _islfs(rlog, node=None, rev=None):
         rev = rlog.rev(node)
     else:
         node = rlog.node(rev)
-    if node == nullid:
+    if node == rlog.nullid:
         return False
     flags = rlog.flags(rev)
     return bool(flags & revlog.REVIDX_EXTSTORED)
@@ -73,7 +73,6 @@ from mercurial.i18n import _
 from mercurial.node import (
     bin,
     hex,
-    nullid,
     nullrev,
     short,
 )
@@ -908,13 +907,13 @@ class queue(object):
         """
         if rev is None:
             (p1, p2) = repo.dirstate.parents()
-            if p2 == nullid:
+            if p2 == repo.nullid:
                 return p1
             if not self.applied:
                 return None
             return self.applied[-1].node
         p1, p2 = repo.changelog.parents(rev)
-        if p2 != nullid and p2 in [x.node for x in self.applied]:
+        if p2 != repo.nullid and p2 in [x.node for x in self.applied]:
             return p2
         return p1
 
@@ -1591,7 +1590,7 @@ class queue(object):
             for hs in repo.branchmap().iterheads():
                 heads.extend(hs)
             if not heads:
-                heads = [nullid]
+                heads = [repo.nullid]
             if repo.dirstate.p1() not in heads and not exact:
                 self.ui.status(_(b"(working directory not at a head)\n"))
 
@@ -1857,7 +1856,7 @@ class queue(object):
                     fctx = ctx[f]
                     repo.wwrite(f, fctx.data(), fctx.flags())
                     repo.dirstate.normal(f)
-                repo.setparents(qp, nullid)
+                repo.setparents(qp, repo.nullid)
             for patch in reversed(self.applied[start:end]):
                 self.ui.status(_(b"popping %s\n") % patch.name)
             del self.applied[start:end]
@@ -11,7 +11,6 @@ import errno
 import struct
 
 from mercurial.i18n import _
-from mercurial.node import nullid
 from mercurial import (
     bundle2,
     changegroup,
@@ -94,7 +93,7 @@ def generateellipsesbundle2(
         raise error.Abort(_(b'depth must be positive, got %d') % depth)
 
     heads = set(heads or repo.heads())
-    common = set(common or [nullid])
+    common = set(common or [repo.nullid])
 
     visitnodes, relevant_nodes, ellipsisroots = exchange._computeellipsis(
         repo, common, heads, set(), match, depth=depth
@@ -128,7 +127,7 @@ def generate_ellipses_bundle2_for_wideni
     common,
     known,
 ):
-    common = set(common or [nullid])
+    common = set(common or [repo.nullid])
     # Steps:
     # 1. Send kill for "$known & ::common"
     #
@@ -12,7 +12,6 @@ import os
 from mercurial.i18n import _
 from mercurial.node import (
     hex,
-    nullid,
     short,
 )
 from mercurial import (
@@ -193,7 +192,7 @@ def pullbundle2extraprepare(orig, pullop
         kwargs[b'known'] = [
             hex(ctx.node())
             for ctx in repo.set(b'::%ln', pullop.common)
-            if ctx.node() != nullid
+            if ctx.node() != repo.nullid
         ]
         if not kwargs[b'known']:
             # Mercurial serializes an empty list as '' and deserializes it as
@@ -370,7 +369,7 @@ def _widen(
         ds = repo.dirstate
         p1, p2 = ds.p1(), ds.p2()
         with ds.parentchange():
-            ds.setparents(nullid, nullid)
+            ds.setparents(repo.nullid, repo.nullid)
         if isoldellipses:
             with wrappedextraprepare:
                 exchange.pull(repo, remote, heads=common)
@@ -380,7 +379,7 @@ def _widen(
             known = [
                 ctx.node()
                 for ctx in repo.set(b'::%ln', common)
-                if ctx.node() != nullid
+                if ctx.node() != repo.nullid
             ]
             with remote.commandexecutor() as e:
                 bundle = e.callcommand(
@@ -69,7 +69,7 @@ import operator
 import re
 import time
 
-from mercurial.node import bin, nullid, short
+from mercurial.node import bin, short
 from mercurial.i18n import _
 from mercurial.pycompat import getattr
 from mercurial.thirdparty import attr
@@ -586,7 +586,7 @@ def getoldnodedrevmap(repo, nodelist):
             tags.tag(
                 repo,
                 tagname,
-                nullid,
+                repo.nullid,
                 message=None,
                 user=None,
                 date=None,
@@ -1606,7 +1606,7 @@ def phabsend(ui, repo, *revs, **opts):
             tags.tag(
                 repo,
                 tagname,
-                nullid,
+                repo.nullid,
                 message=None,
                 user=None,
                 date=None,
@@ -2,7 +2,10 @@ from __future__ import absolute_import
 
 import threading
 
-from mercurial.node import hex, nullid
+from mercurial.node import (
+    hex,
+    sha1nodeconstants,
+)
 from mercurial.pycompat import getattr
 from mercurial import (
     mdiff,
@@ -55,7 +58,7 @@ class unioncontentstore(basestore.baseun
         """
         chain = self.getdeltachain(name, node)
 
-        if chain[-1][ChainIndicies.BASENODE] != nullid:
+        if chain[-1][ChainIndicies.BASENODE] != sha1nodeconstants.nullid:
             # If we didn't receive a full chain, throw
             raise KeyError((name, hex(node)))
 
@@ -92,7 +95,7 @@ class unioncontentstore(basestore.baseun
         deltabasenode.
         """
         chain = self._getpartialchain(name, node)
-        while chain[-1][ChainIndicies.BASENODE] != nullid:
+        while chain[-1][ChainIndicies.BASENODE] != sha1nodeconstants.nullid:
             x, x, deltabasename, deltabasenode, x = chain[-1]
             try:
                 morechain = self._getpartialchain(deltabasename, deltabasenode)
@@ -187,7 +190,12 @@ class remotefilelogcontentstore(basestor
         # Since remotefilelog content stores only contain full texts, just
         # return that.
         revision = self.get(name, node)
-        return revision, name, nullid, self.getmeta(name, node)
+        return (
+            revision,
+            name,
+            sha1nodeconstants.nullid,
+            self.getmeta(name, node),
+        )
 
     def getdeltachain(self, name, node):
         # Since remotefilelog content stores just contain full texts, we return
@@ -195,7 +203,7 @@ class remotefilelogcontentstore(basestor
         # The nullid in the deltabasenode slot indicates that the revision is a
         # fulltext.
         revision = self.get(name, node)
-        return [(name, node, None, nullid, revision)]
+        return [(name, node, None, sha1nodeconstants.nullid, revision)]
 
     def getmeta(self, name, node):
         self._sanitizemetacache()
@@ -237,7 +245,12 @@ class remotecontentstore(object):
 
     def getdelta(self, name, node):
         revision = self.get(name, node)
-        return revision, name, nullid, self._shared.getmeta(name, node)
+        return (
+            revision,
+            name,
+            sha1nodeconstants.nullid,
+            self._shared.getmeta(name, node),
+        )
 
     def getdeltachain(self, name, node):
         # Since our remote content stores just contain full texts, we return a
@@ -245,7 +258,7 @@ class remotecontentstore(object):
         # The nullid in the deltabasenode slot indicates that the revision is a
         # fulltext.
         revision = self.get(name, node)
-        return [(name, node, None, nullid, revision)]
+        return [(name, node, None, sha1nodeconstants.nullid, revision)]
 
     def getmeta(self, name, node):
         self._fileservice.prefetch(
@@ -276,11 +289,11 @@ class manifestrevlogstore(object):
 
     def getdelta(self, name, node):
         revision = self.get(name, node)
-        return revision, name, nullid, self.getmeta(name, node)
+        return revision, name, self._cl.nullid, self.getmeta(name, node)
 
     def getdeltachain(self, name, node):
         revision = self.get(name, node)
-        return [(name, node, None, nullid, revision)]
+        return [(name, node, None, self._cl.nullid, revision)]
 
     def getmeta(self, name, node):
         rl = self._revlog(name)
@@ -304,9 +317,9 @@ class manifestrevlogstore(object):
             missing.discard(ancnode)
 
             p1, p2 = rl.parents(ancnode)
-            if p1 != nullid and p1 not in known:
+            if p1 != self._cl.nullid and p1 not in known:
                 missing.add(p1)
-            if p2 != nullid and p2 not in known:
+            if p2 != self._cl.nullid and p2 not in known:
                 missing.add(p2)
 
             linknode = self._cl.node(rl.linkrev(ancrev))
@@ -3,7 +3,10 @@ from __future__ import absolute_import
 import struct
 import zlib
 
-from mercurial.node import hex, nullid
+from mercurial.node import (
+    hex,
+    sha1nodeconstants,
+)
 from mercurial.i18n import _
 from mercurial import (
     pycompat,
@@ -458,7 +461,7 @@ class mutabledatapack(basepack.mutableba
         rawindex = b''
         fmt = self.INDEXFORMAT
         for node, deltabase, offset, size in entries:
-            if deltabase == nullid:
+            if deltabase == sha1nodeconstants.nullid:
                 deltabaselocation = FULLTEXTINDEXMARK
             else:
                 # Instead of storing the deltabase node in the index, let's
@@ -12,7 +12,7 @@ import zlib
 from mercurial.node import (
     bin,
     hex,
-    nullid,
+    sha1nodeconstants,
     short,
 )
 from mercurial.i18n import _
@@ -57,9 +57,9 @@ def debugremotefilelog(ui, path, **opts)
             _(b"%s => %s %s %s %s\n")
             % (short(node), short(p1), short(p2), short(linknode), copyfrom)
         )
-        if p1 != nullid:
+        if p1 != sha1nodeconstants.nullid:
             queue.append(p1)
-        if p2 != nullid:
+        if p2 != sha1nodeconstants.nullid:
             queue.append(p2)
 
 
@@ -152,7 +152,7 @@ def debugindex(orig, ui, repo, file_=Non
         try:
             pp = r.parents(node)
         except Exception:
-            pp = [nullid, nullid]
+            pp = [repo.nullid, repo.nullid]
         ui.write(
             b"% 6d % 9d % 7d % 6d % 7d %s %s %s\n"
             % (
@@ -197,7 +197,7 @@ def debugindexdot(orig, ui, repo, file_)
         node = r.node(i)
         pp = r.parents(node)
         ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
-        if pp[1] != nullid:
+        if pp[1] != repo.nullid:
             ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
     ui.write(b"}\n")
 
@@ -212,7 +212,7 @@ def verifyremotefilelog(ui, path, **opts
             filepath = os.path.join(root, file)
             size, firstnode, mapping = parsefileblob(filepath, decompress)
             for p1, p2, linknode, copyfrom in pycompat.itervalues(mapping):
-                if linknode == nullid:
+                if linknode == sha1nodeconstants.nullid:
                     actualpath = os.path.relpath(root, path)
                     key = fileserverclient.getcachekey(
                         b"reponame", actualpath, file
@@ -371,7 +371,7 @@ def _sanitycheck(ui, nodes, bases):
         current = node
         deltabase = bases[current]
 
-        while deltabase != nullid:
+        while deltabase != sha1nodeconstants.nullid:
             if deltabase not in nodes:
                 ui.warn(
                     (
@@ -397,7 +397,7 @@ def _sanitycheck(ui, nodes, bases):
            deltabase = bases[current]
        # Since ``node`` begins a valid chain, reset/memoize its base to nullid
        # so we don't traverse it again.
-        bases[node] = nullid
+        bases[node] = sha1nodeconstants.nullid
    return failures
 
 
@@ -14,7 +14,7 @@ import time
 import zlib
 
 from mercurial.i18n import _
-from mercurial.node import bin, hex, nullid
+from mercurial.node import bin, hex
 from mercurial import (
     error,
     pycompat,
@@ -599,9 +599,13 @@ class fileserverclient(object):
 
             # partition missing nodes into nullid and not-nullid so we can
            # warn about this filtering potentially shadowing bugs.
-            nullids = len([None for unused, id in missingids if id == nullid])
+            nullids = len(
+                [None for unused, id in missingids if id == self.repo.nullid]
+            )
             if nullids:
-                missingids = [(f, id) for f, id in missingids if id != nullid]
+                missingids = [
+                    (f, id) for f, id in missingids if id != self.repo.nullid
+                ]
                 repo.ui.develwarn(
                     (
                         b'remotefilelog not fetching %d null revs'
@@ -2,7 +2,10 @@ from __future__ import absolute_import
 
 import struct
 
-from mercurial.node import hex, nullid
+from mercurial.node import (
+    hex,
+    sha1nodeconstants,
+)
 from mercurial import (
     pycompat,
     util,
@@ -147,9 +150,9 @@ class historypack(basepack.basepack):
                 pending.remove(ancnode)
                 p1node = entry[ANC_P1NODE]
                 p2node = entry[ANC_P2NODE]
-                if p1node != nullid and p1node not in known:
+                if p1node != sha1nodeconstants.nullid and p1node not in known:
                     pending.add(p1node)
-                if p2node != nullid and p2node not in known:
+                if p2node != sha1nodeconstants.nullid and p2node not in known:
                     pending.add(p2node)
 
                 yield (ancnode, p1node, p2node, entry[ANC_LINKNODE], copyfrom)
@@ -457,9 +460,9 @@ class mutablehistorypack(basepack.mutabl
        def parentfunc(node):
            x, p1, p2, x, x, x = entrymap[node]
            parents = []
-            if p1 != nullid:
+            if p1 != sha1nodeconstants.nullid:
                parents.append(p1)
-            if p2 != nullid:
+            if p2 != sha1nodeconstants.nullid:
                parents.append(p2)
            return parents
 
@@ -1,6 +1,9 @@
 from __future__ import absolute_import
 
-from mercurial.node import hex, nullid
+from mercurial.node import (
+    hex,
+    sha1nodeconstants,
+)
 from . import (
     basestore,
     shallowutil,
@@ -51,9 +54,9 @@ class unionmetadatastore(basestore.baseu
                 missing.append((name, node))
                 continue
             p1, p2, linknode, copyfrom = value
-            if p1 != nullid and p1 not in known:
+            if p1 != sha1nodeconstants.nullid and p1 not in known:
                 queue.append((copyfrom or curname, p1))
-            if p2 != nullid and p2 not in known:
+            if p2 != sha1nodeconstants.nullid and p2 not in known:
                 queue.append((curname, p2))
         return missing
 
@@ -9,7 +9,7 @@ from __future__ import absolute_import
 import collections
 import time
 
-from mercurial.node import bin, hex, nullid, nullrev
+from mercurial.node import bin, hex, nullrev
 from mercurial import (
     ancestor,
     context,
@@ -35,7 +35,7 @@ class remotefilectx(context.filectx):
         ancestormap=None,
     ):
         if fileid == nullrev:
-            fileid = nullid
+            fileid = repo.nullid
         if fileid and len(fileid) == 40:
             fileid = bin(fileid)
         super(remotefilectx, self).__init__(
@@ -78,7 +78,7 @@ class remotefilectx(context.filectx):
 
     @propertycache
     def _linkrev(self):
-        if self._filenode == nullid:
+        if self._filenode == self._repo.nullid:
             return nullrev
 
         ancestormap = self.ancestormap()
@@ -174,7 +174,7 @@ class remotefilectx(context.filectx):
 
         p1, p2, linknode, copyfrom = ancestormap[self._filenode]
         results = []
-        if p1 != nullid:
+        if p1 != repo.nullid:
             path = copyfrom or self._path
             flog = repo.file(path)
             p1ctx = remotefilectx(
@@ -183,7 +183,7 @@ class remotefilectx(context.filectx):
             p1ctx._descendantrev = self.rev()
             results.append(p1ctx)
 
-        if p2 != nullid:
+        if p2 != repo.nullid:
             path = self._path
             flog = repo.file(path)
             p2ctx = remotefilectx(
@@ -504,25 +504,25 @@ class remoteworkingfilectx(context.worki
         if renamed:
             p1 = renamed
         else:
-            p1 = (path, pcl[0]._manifest.get(path, nullid))
+            p1 = (path, pcl[0]._manifest.get(path, self._repo.nullid))
 
-        p2 = (path, nullid)
+        p2 = (path, self._repo.nullid)
         if len(pcl) > 1:
-            p2 = (path, pcl[1]._manifest.get(path, nullid))
+            p2 = (path, pcl[1]._manifest.get(path, self._repo.nullid))
 
         m = {}
-        if p1[1] != nullid:
+        if p1[1] != self._repo.nullid:
             p1ctx = self._repo.filectx(p1[0], fileid=p1[1])
             m.update(p1ctx.filelog().ancestormap(p1[1]))
 
-        if p2[1] != nullid:
+        if p2[1] != self._repo.nullid:
             p2ctx = self._repo.filectx(p2[0], fileid=p2[1])
             m.update(p2ctx.filelog().ancestormap(p2[1]))
 
         copyfrom = b''
         if renamed:
             copyfrom = renamed[0]
-        m[None] = (p1[1], p2[1], nullid, copyfrom)
+        m[None] = (p1[1], p2[1], self._repo.nullid, copyfrom)
         self._ancestormap = m
 
         return self._ancestormap
@@ -10,12 +10,7 @@ from __future__ import absolute_import
 import collections
 import os
 
-from mercurial.node import (
-    bin,
-    nullid,
-    wdirfilenodeids,
-    wdirid,
-)
+from mercurial.node import bin
 from mercurial.i18n import _
 from mercurial import (
     ancestor,
@@ -100,7 +95,7 @@ class remotefilelog(object):
 
         pancestors = {}
         queue = []
-        if realp1 != nullid:
+        if realp1 != self.repo.nullid:
             p1flog = self
             if copyfrom:
                 p1flog = remotefilelog(self.opener, copyfrom, self.repo)
@@ -108,7 +103,7 @@ class remotefilelog(object):
             pancestors.update(p1flog.ancestormap(realp1))
             queue.append(realp1)
             visited.add(realp1)
-        if p2 != nullid:
+        if p2 != self.repo.nullid:
             pancestors.update(self.ancestormap(p2))
             queue.append(p2)
             visited.add(p2)
@@ -129,10 +124,10 @@ class remotefilelog(object):
                     pacopyfrom,
                 )
 
-                if pa1 != nullid and pa1 not in visited:
+                if pa1 != self.repo.nullid and pa1 not in visited:
                     queue.append(pa1)
                     visited.add(pa1)
-                if pa2 != nullid and pa2 not in visited:
+                if pa2 != self.repo.nullid and pa2 not in visited:
                     queue.append(pa2)
                     visited.add(pa2)
 
@@ -238,7 +233,7 @@ class remotefilelog(object):
         returns True if text is different than what is stored.
         """
 
-        if node == nullid:
+        if node == self.repo.nullid:
             return True
 
         nodetext = self.read(node)
@@ -275,13 +270,13 @@ class remotefilelog(object):
         return store.getmeta(self.filename, node).get(constants.METAKEYFLAG, 0)
 
     def parents(self, node):
-        if node == nullid:
-            return nullid, nullid
+        if node == self.repo.nullid:
+            return self.repo.nullid, self.repo.nullid
 
         ancestormap = self.repo.metadatastore.getancestors(self.filename, node)
         p1, p2, linknode, copyfrom = ancestormap[node]
         if copyfrom:
-            p1 = nullid
+            p1 = self.repo.nullid
 
         return p1, p2
 
@@ -317,8 +312,8 @@ class remotefilelog(object):
         if prevnode is None:
             basenode = prevnode = p1
         if basenode == node:
-            basenode = nullid
-        if basenode != nullid:
+            basenode = self.repo.nullid
+        if basenode != self.repo.nullid:
             revision = None
             delta = self.revdiff(basenode, node)
         else:
@@ -380,13 +375,16 b' class remotefilelog(object):' | |||||
380 | this is generally only used for bundling and communicating with vanilla |
|
375 | this is generally only used for bundling and communicating with vanilla | |
381 | hg clients. |
|
376 | hg clients. | |
382 | """ |
|
377 | """ | |
383 | if node == nullid: |
|
378 | if node == self.repo.nullid: | |
384 | return b"" |
|
379 | return b"" | |
385 | if len(node) != 20: |
|
380 | if len(node) != 20: | |
386 | raise error.LookupError( |
|
381 | raise error.LookupError( | |
387 | node, self.filename, _(b'invalid revision input') |
|
382 | node, self.filename, _(b'invalid revision input') | |
388 | ) |
|
383 | ) | |
389 | if node == wdirid or node in wdirfilenodeids: |
|
384 | if ( | |
|
385 | node == self.repo.nodeconstants.wdirid | |||
|
386 | or node in self.repo.nodeconstants.wdirfilenodeids | |||
|
387 | ): | |||
390 | raise error.WdirUnsupported |
|
388 | raise error.WdirUnsupported | |
391 |
|
389 | |||
392 | store = self.repo.contentstore |
|
390 | store = self.repo.contentstore | |
@@ -432,8 +430,8 b' class remotefilelog(object):' | |||||
432 | return self.repo.metadatastore.getancestors(self.filename, node) |
|
430 | return self.repo.metadatastore.getancestors(self.filename, node) | |
433 |
|
431 | |||
434 | def ancestor(self, a, b): |
|
432 | def ancestor(self, a, b): | |
435 | if a == nullid or b == nullid: |
|
433 | if a == self.repo.nullid or b == self.repo.nullid: | |
436 | return nullid |
|
434 | return self.repo.nullid | |
437 |
|
435 | |||
438 | revmap, parentfunc = self._buildrevgraph(a, b) |
|
436 | revmap, parentfunc = self._buildrevgraph(a, b) | |
439 | nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)} |
|
437 | nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)} | |
@@ -442,13 +440,13 b' class remotefilelog(object):' | |||||
442 | if ancs: |
|
440 | if ancs: | |
443 | # choose a consistent winner when there's a tie |
|
441 | # choose a consistent winner when there's a tie | |
444 | return min(map(nodemap.__getitem__, ancs)) |
|
442 | return min(map(nodemap.__getitem__, ancs)) | |
445 | return nullid |
|
443 | return self.repo.nullid | |
446 |
|
444 | |||
447 | def commonancestorsheads(self, a, b): |
|
445 | def commonancestorsheads(self, a, b): | |
448 | """calculate all the heads of the common ancestors of nodes a and b""" |
|
446 | """calculate all the heads of the common ancestors of nodes a and b""" | |
449 |
|
447 | |||
450 | if a == nullid or b == nullid: |
|
448 | if a == self.repo.nullid or b == self.repo.nullid: | |
451 | return nullid |
|
449 | return self.repo.nullid | |
452 |
|
450 | |||
453 | revmap, parentfunc = self._buildrevgraph(a, b) |
|
451 | revmap, parentfunc = self._buildrevgraph(a, b) | |
454 | nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)} |
|
452 | nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)} | |
@@ -472,10 +470,10 b' class remotefilelog(object):' | |||||
472 | p1, p2, linknode, copyfrom = pdata |
|
470 | p1, p2, linknode, copyfrom = pdata | |
473 | # Don't follow renames (copyfrom). |
|
471 | # Don't follow renames (copyfrom). | |
474 | # remotefilectx.ancestor does that. |
|
472 | # remotefilectx.ancestor does that. | |
475 | if p1 != nullid and not copyfrom: |
|
473 | if p1 != self.repo.nullid and not copyfrom: | |
476 | parents.append(p1) |
|
474 | parents.append(p1) | |
477 | allparents.add(p1) |
|
475 | allparents.add(p1) | |
478 | if p2 != nullid: |
|
476 | if p2 != self.repo.nullid: | |
479 | parents.append(p2) |
|
477 | parents.append(p2) | |
480 | allparents.add(p2) |
|
478 | allparents.add(p2) | |
481 |
|
479 |
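
Every hunk in this file applies the same mechanical substitution: the module-level nullid (and the wdirid / wdirfilenodeids globals) become attributes reached through the repository. A minimal sketch of what the new names resolve to; sha1nodeconstants is Mercurial's real constants class, and the assertions merely restate its documented SHA-1 values:

    from mercurial.node import sha1nodeconstants

    # The old globals were fixed 20-byte SHA-1 values; for SHA-1 repos the
    # per-repository attributes carry the same values.
    assert sha1nodeconstants.nullid == b'\x00' * 20
    assert sha1nodeconstants.nullhex == b'0' * 40

    # repo.nullid is expected to be repo.nodeconstants.nullid, so callers
    # stop hard-coding the width of the hash in use.
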
--- a/hgext/remotefilelog/remotefilelogserver.py
+++ b/hgext/remotefilelog/remotefilelogserver.py
@@ -13,7 +13,7 @@ import time
 import zlib

 from mercurial.i18n import _
-from mercurial.node import bin, hex, nullid
+from mercurial.node import bin, hex
 from mercurial.pycompat import open
 from mercurial import (
     changegroup,
@@ -242,7 +242,7 @@ def _loadfileblob(repo, cachepath, path,
     filecachepath = os.path.join(cachepath, path, hex(node))
     if not os.path.exists(filecachepath) or os.path.getsize(filecachepath) == 0:
         filectx = repo.filectx(path, fileid=node)
-        if filectx.node() == nullid:
+        if filectx.node() == repo.nullid:
             repo.changelog = changelog.changelog(repo.svfs)
             filectx = repo.filectx(path, fileid=node)

@@ -284,7 +284,7 @@ def getflogheads(repo, proto, path):
     """A server api for requesting a filelog's heads"""
     flog = repo.file(path)
     heads = flog.heads()
-    return b'\n'.join((hex(head) for head in heads if head != nullid))
+    return b'\n'.join((hex(head) for head in heads if head != repo.nullid))


 def getfile(repo, proto, file, node):
@@ -302,7 +302,7 @@ def getfile(repo, proto, file, node):
     if not cachepath:
         cachepath = os.path.join(repo.path, b"remotefilelogcache")
     node = bin(node.strip())
-    if node == nullid:
+    if node == repo.nullid:
         return b'0\0'
     return b'0\0' + _loadfileblob(repo, cachepath, file, node)

@@ -327,7 +327,7 @@ def getfiles(repo, proto):
             break

         node = bin(request[:40])
-        if node == nullid:
+        if node == repo.nullid:
             yield b'0\n'
             continue

@@ -380,8 +380,8 @@ def createfileblob(filectx):
     ancestortext = b""
     for ancestorctx in ancestors:
         parents = ancestorctx.parents()
-        p1 = nullid
-        p2 = nullid
+        p1 = repo.nullid
+        p2 = repo.nullid
         if len(parents) > 0:
             p1 = parents[0].filenode()
         if len(parents) > 1:

--- a/hgext/remotefilelog/repack.py
+++ b/hgext/remotefilelog/repack.py
@@ -4,10 +4,7 @@ import os
 import time

 from mercurial.i18n import _
-from mercurial.node import (
-    nullid,
-    short,
-)
+from mercurial.node import short
 from mercurial import (
     encoding,
     error,
@@ -586,7 +583,7 @@ class repacker(object):
             # Create one contiguous chain and reassign deltabases.
             for i, node in enumerate(orphans):
                 if i == 0:
-                    deltabases[node] = (nullid, 0)
+                    deltabases[node] = (self.repo.nullid, 0)
                 else:
                     parent = orphans[i - 1]
                     deltabases[node] = (parent, deltabases[parent][1] + 1)
@@ -676,8 +673,8 @@ class repacker(object):
                 # of immediate child
                 deltatuple = deltabases.get(node, None)
                 if deltatuple is None:
-                    deltabase, chainlen = nullid, 0
-                    deltabases[node] = (nullid, 0)
+                    deltabase, chainlen = self.repo.nullid, 0
+                    deltabases[node] = (self.repo.nullid, 0)
                     nobase.add(node)
                 else:
                     deltabase, chainlen = deltatuple
@@ -692,7 +689,7 @@ class repacker(object):
                 # file was copied from elsewhere. So don't attempt to do any
                 # deltas with the other file.
                 if copyfrom:
-                    p1 = nullid
+                    p1 = self.repo.nullid

                 if chainlen < maxchainlen:
                     # Record this child as the delta base for its parents.
@@ -700,9 +697,9 @@ class repacker(object):
                     # many children, and this will only choose the last one.
                     # TODO: record all children and try all deltas to find
                     # best
-                    if p1 != nullid:
+                    if p1 != self.repo.nullid:
                         deltabases[p1] = (node, chainlen + 1)
-                    if p2 != nullid:
+                    if p2 != self.repo.nullid:
                         deltabases[p2] = (node, chainlen + 1)

         # experimental config: repack.chainorphansbysize
@@ -719,7 +716,7 @@ class repacker(object):
                 # TODO: Optimize the deltachain fetching. Since we're
                 # iterating over the different version of the file, we may
                 # be fetching the same deltachain over and over again.
-                if deltabase != nullid:
+                if deltabase != self.repo.nullid:
                     deltaentry = self.data.getdelta(filename, node)
                     delta, deltabasename, origdeltabase, meta = deltaentry
                     size = meta.get(constants.METAKEYSIZE)
@@ -791,9 +788,9 @@ class repacker(object):
                 # If copyfrom == filename, it means the copy history
                 # went to come other file, then came back to this one, so we
                 # should continue processing it.
-                if p1 != nullid and copyfrom != filename:
+                if p1 != self.repo.nullid and copyfrom != filename:
                     dontprocess.add(p1)
-                if p2 != nullid:
+                if p2 != self.repo.nullid:
                     dontprocess.add(p2)
                 continue

@@ -814,9 +811,9 @@ class repacker(object):
         def parentfunc(node):
             p1, p2, linknode, copyfrom = ancestors[node]
             parents = []
-            if p1 != nullid:
+            if p1 != self.repo.nullid:
                 parents.append(p1)
-            if p2 != nullid:
+            if p2 != self.repo.nullid:
                 parents.append(p2)
             return parents

--- a/hgext/remotefilelog/shallowbundle.py
+++ b/hgext/remotefilelog/shallowbundle.py
@@ -7,7 +7,7 @@
 from __future__ import absolute_import

 from mercurial.i18n import _
-from mercurial.node import bin, hex, nullid
+from mercurial.node import bin, hex
 from mercurial import (
     bundlerepo,
     changegroup,
@@ -143,7 +143,7 @@ class shallowcg1packer(changegroup.cgpac

     def nodechunk(self, revlog, node, prevnode, linknode):
         prefix = b''
-        if prevnode == nullid:
+        if prevnode == revlog.nullid:
             delta = revlog.rawdata(node)
             prefix = mdiff.trivialdiffheader(len(delta))
         else:
@@ -245,7 +245,7 @@ def addchangegroupfiles(
     processed = set()

     def available(f, node, depf, depnode):
-        if depnode != nullid and (depf, depnode) not in processed:
+        if depnode != repo.nullid and (depf, depnode) not in processed:
             if not (depf, depnode) in revisiondatas:
                 # It's not in the changegroup, assume it's already
                 # in the repo
@@ -267,7 +267,7 @@ def addchangegroupfiles(
         dependents = [revisiondata[1], revisiondata[2], revisiondata[4]]

         for dependent in dependents:
-            if dependent == nullid or (f, dependent) in revisiondatas:
+            if dependent == repo.nullid or (f, dependent) in revisiondatas:
                 continue
             prefetchfiles.append((f, hex(dependent)))

@@ -306,7 +306,7 @@ def addchangegroupfiles(
             continue

         for p in [p1, p2]:
-            if p != nullid:
+            if p != repo.nullid:
                 if not available(f, node, f, p):
                     continue

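
Note that the replacement target tracks whatever object is in scope: nodechunk receives a revlog rather than a repo, and revlogs expose the same constant. A condensed sketch of the branch above (the helper name is hypothetical; rawdata, revdiff, and mdiff.trivialdiffheader are real Mercurial APIs):

    def nodechunk_prefix(revlog, node, prevnode, mdiff):
        # Against the null node there is no base to delta against, so a
        # full snapshot is emitted behind a trivial diff header.
        if prevnode == revlog.nullid:
            delta = revlog.rawdata(node)
            return mdiff.trivialdiffheader(len(delta)), delta
        return b'', revlog.revdiff(prevnode, node)
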
--- a/hgext/remotefilelog/shallowrepo.py
+++ b/hgext/remotefilelog/shallowrepo.py
@@ -9,7 +9,7 @@ from __future__ import absolute_import
 import os

 from mercurial.i18n import _
-from mercurial.node import hex, nullid, nullrev
+from mercurial.node import hex, nullrev
 from mercurial import (
     encoding,
     error,
@@ -206,8 +206,8 @@ def wraprepo(repo):
             m1 = ctx.p1().manifest()
             files = []
             for f in ctx.modified() + ctx.added():
-                fparent1 = m1.get(f, nullid)
-                if fparent1 != nullid:
+                fparent1 = m1.get(f, self.nullid)
+                if fparent1 != self.nullid:
                     files.append((f, hex(fparent1)))
             self.fileservice.prefetch(files)
             return super(shallowrepository, self).commitctx(

--- a/hgext/sqlitestore.py
+++ b/hgext/sqlitestore.py
@@ -52,7 +52,6 @@ import zlib

 from mercurial.i18n import _
 from mercurial.node import (
-    nullid,
     nullrev,
     sha1nodeconstants,
     short,
@@ -366,12 +365,12 @@ class sqlitefilestore(object):
             )

             if p1rev == nullrev:
-                p1node = nullid
+                p1node = sha1nodeconstants.nullid
             else:
                 p1node = self._revtonode[p1rev]

             if p2rev == nullrev:
-                p2node = nullid
+                p2node = sha1nodeconstants.nullid
             else:
                 p2node = self._revtonode[p2rev]

@@ -400,7 +399,7 @@ class sqlitefilestore(object):
         return iter(pycompat.xrange(len(self._revisions)))

     def hasnode(self, node):
-        if node == nullid:
+        if node == sha1nodeconstants.nullid:
             return False

         return node in self._nodetorev
@@ -411,8 +410,8 @@ class sqlitefilestore(object):
         )

     def parents(self, node):
-        if node == nullid:
-            return nullid, nullid
+        if node == sha1nodeconstants.nullid:
+            return sha1nodeconstants.nullid, sha1nodeconstants.nullid

         if node not in self._revisions:
             raise error.LookupError(node, self._path, _(b'no node'))
@@ -431,7 +430,7 @@ class sqlitefilestore(object):
             return entry.p1rev, entry.p2rev

     def rev(self, node):
-        if node == nullid:
+        if node == sha1nodeconstants.nullid:
             return nullrev

         if node not in self._nodetorev:
@@ -441,7 +440,7 @@ class sqlitefilestore(object):

     def node(self, rev):
         if rev == nullrev:
-            return nullid
+            return sha1nodeconstants.nullid

         if rev not in self._revtonode:
             raise IndexError(rev)
@@ -485,7 +484,7 @@ class sqlitefilestore(object):
     def heads(self, start=None, stop=None):
         if start is None and stop is None:
             if not len(self):
-                return [nullid]
+                return [sha1nodeconstants.nullid]

         startrev = self.rev(start) if start is not None else nullrev
         stoprevs = {self.rev(n) for n in stop or []}
@@ -529,7 +528,7 @@ class sqlitefilestore(object):
         return len(self.revision(node))

     def revision(self, node, raw=False, _verifyhash=True):
-        if node in (nullid, nullrev):
+        if node in (sha1nodeconstants.nullid, nullrev):
             return b''

         if isinstance(node, int):
@@ -596,7 +595,7 @@ class sqlitefilestore(object):
                 b'unhandled value for nodesorder: %s' % nodesorder
             )

-        nodes = [n for n in nodes if n != nullid]
+        nodes = [n for n in nodes if n != sha1nodeconstants.nullid]

         if not nodes:
             return
@@ -705,12 +704,12 @@ class sqlitefilestore(object):
             raise SQLiteStoreError(b'unhandled revision flag')

         if maybemissingparents:
-            if p1 != nullid and not self.hasnode(p1):
-                p1 = nullid
+            if p1 != sha1nodeconstants.nullid and not self.hasnode(p1):
+                p1 = sha1nodeconstants.nullid
                 storeflags |= FLAG_MISSING_P1

-            if p2 != nullid and not self.hasnode(p2):
-                p2 = nullid
+            if p2 != sha1nodeconstants.nullid and not self.hasnode(p2):
+                p2 = sha1nodeconstants.nullid
                 storeflags |= FLAG_MISSING_P2

         baserev = self.rev(deltabase)
@@ -736,7 +735,10 @@ class sqlitefilestore(object):
             # Possibly reset parents to make them proper.
             entry = self._revisions[node]

-            if entry.flags & FLAG_MISSING_P1 and p1 != nullid:
+            if (
+                entry.flags & FLAG_MISSING_P1
+                and p1 != sha1nodeconstants.nullid
+            ):
                 entry.p1node = p1
                 entry.p1rev = self._nodetorev[p1]
                 entry.flags &= ~FLAG_MISSING_P1
@@ -746,7 +748,10 @@ class sqlitefilestore(object):
                     (self._nodetorev[p1], entry.flags, entry.rid),
                 )

-            if entry.flags & FLAG_MISSING_P2 and p2 != nullid:
+            if (
+                entry.flags & FLAG_MISSING_P2
+                and p2 != sha1nodeconstants.nullid
+            ):
                 entry.p2node = p2
                 entry.p2rev = self._nodetorev[p2]
                 entry.flags &= ~FLAG_MISSING_P2
@@ -761,7 +766,7 @@ class sqlitefilestore(object):
                 empty = False
                 continue

-            if deltabase == nullid:
+            if deltabase == sha1nodeconstants.nullid:
                 text = mdiff.patch(b'', delta)
                 storedelta = None
             else:
@@ -1012,7 +1017,7 @@ class sqlitefilestore(object):
             assert revisiondata is not None
             deltabase = p1

-            if deltabase == nullid:
+            if deltabase == sha1nodeconstants.nullid:
                 delta = revisiondata
             else:
                 delta = mdiff.textdiff(
@@ -1021,7 +1026,7 @@ class sqlitefilestore(object):

         # File index stores a pointer to its delta and the parent delta.
         # The parent delta is stored via a pointer to the fileindex PK.
-        if deltabase == nullid:
+        if deltabase == sha1nodeconstants.nullid:
             baseid = None
         else:
             baseid = self._revisions[deltabase].rid
@@ -1055,12 +1060,12 @@ class sqlitefilestore(object):

         rev = len(self)

-        if p1 == nullid:
+        if p1 == sha1nodeconstants.nullid:
             p1rev = nullrev
         else:
             p1rev = self._nodetorev[p1]

-        if p2 == nullid:
+        if p2 == sha1nodeconstants.nullid:
             p2rev = nullrev
         else:
             p2rev = self._nodetorev[p2]

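
The sqlitestore hunks use sha1nodeconstants directly because the SQLite file store is explicitly SHA-1 based. One invariant the changes preserve is that the null node short-circuits before any database access; a condensed, hypothetical restatement of the parents() shape above:

    from mercurial.node import sha1nodeconstants

    def parents(store, node):
        # By convention the null node's parents are the null pair.
        if node == sha1nodeconstants.nullid:
            return sha1nodeconstants.nullid, sha1nodeconstants.nullid
        # The real method raises error.LookupError for unknown nodes.
        entry = store._revisions[node]
        return entry.p1node, entry.p2node
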
--- a/hgext/transplant.py
+++ b/hgext/transplant.py
@@ -22,7 +22,6 @@ from mercurial.pycompat import open
 from mercurial.node import (
     bin,
     hex,
-    nullid,
     short,
 )
 from mercurial import (
@@ -134,6 +133,7 @@ class transplants(object):
 class transplanter(object):
     def __init__(self, ui, repo, opts):
         self.ui = ui
+        self.repo = repo
         self.path = repo.vfs.join(b'transplant')
         self.opener = vfsmod.vfs(self.path)
         self.transplants = transplants(
@@ -221,7 +221,7 @@ class transplanter(object):
                     exchange.pull(repo, source.peer(), heads=[node])

                 skipmerge = False
-                if parents[1] != nullid:
+                if parents[1] != repo.nullid:
                     if not opts.get(b'parent'):
                         self.ui.note(
                             _(b'skipping merge changeset %d:%s\n')
@@ -516,7 +516,7 @@ class transplanter(object):
     def parselog(self, fp):
         parents = []
         message = []
-        node = nullid
+        node = self.repo.nullid
         inmsg = False
         user = None
         date = None
@@ -568,7 +568,7 @@ class transplanter(object):
         def matchfn(node):
             if self.applied(repo, node, root):
                 return False
-            if source.changelog.parents(node)[1] != nullid:
+            if source.changelog.parents(node)[1] != repo.nullid:
                 return False
             extra = source.changelog.read(node)[5]
             cnode = extra.get(b'transplant_source')
@@ -804,7 +804,7 @@ def _dotransplant(ui, repo, *revs, **opt
     tp = transplanter(ui, repo, opts)

     p1 = repo.dirstate.p1()
-    if len(repo) > 0 and p1 == nullid:
+    if len(repo) > 0 and p1 == repo.nullid:
         raise error.Abort(_(b'no revision checked out'))
     if opts.get(b'continue'):
         if not tp.canresume():

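
The one non-mechanical change in this file is the new self.repo = repo in __init__: parselog takes no repo argument, so the instance has to capture the repository before self.repo.nullid is reachable. A stripped-down illustration (hypothetical class):

    class minitransplanter(object):
        def __init__(self, ui, repo):
            self.ui = ui
            self.repo = repo  # captured so parselog can reach repo.nullid

        def parselog_start_node(self):
            # formerly the module-level nullid constant
            return self.repo.nullid
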
--- a/hgext/uncommit.py
+++ b/hgext/uncommit.py
@@ -20,7 +20,6 @@ added and removed in the working directo
 from __future__ import absolute_import

 from mercurial.i18n import _
-from mercurial.node import nullid

 from mercurial import (
     cmdutil,
@@ -113,7 +112,7 @@ def _commitfiltered(

     new = context.memctx(
         repo,
-        parents=[base.node(), nullid],
+        parents=[base.node(), repo.nullid],
         text=message,
         files=files,
         filectxfn=filectxfn,

--- a/mercurial/bookmarks.py
+++ b/mercurial/bookmarks.py
@@ -15,7 +15,6 @@ from .node import (
     bin,
     hex,
     short,
-    wdirid,
 )
 from .pycompat import getattr
 from . import (
@@ -642,7 +641,7 @@ def binaryencode(repo, bookmarks):
     binarydata = []
     for book, node in bookmarks:
         if not node:  # None or ''
-            node = wdirid
+            node = repo.nodeconstants.wdirid
         binarydata.append(_binaryentry.pack(node, len(book)))
         binarydata.append(book)
     return b''.join(binarydata)
@@ -674,7 +673,7 @@ def binarydecode(repo, stream):
         if len(bookmark) < length:
             if entry:
                 raise error.Abort(_(b'bad bookmark stream'))
-        if node == wdirid:
+        if node == repo.nodeconstants.wdirid:
             node = None
         books.append((bookmark, node))
     return books

--- a/mercurial/branchmap.py
+++ b/mercurial/branchmap.py
@@ -12,7 +12,6 @@ import struct
 from .node import (
     bin,
     hex,
-    nullid,
     nullrev,
 )
 from . import (
@@ -189,7 +188,7 @@ class branchcache(object):
         self,
         repo,
         entries=(),
-        tipnode=nullid,
+        tipnode=None,
         tiprev=nullrev,
         filteredhash=None,
         closednodes=None,
@@ -200,7 +199,10 @@ class branchcache(object):
         has a given node or not. If it's not provided, we assume that every node
         we have exists in changelog"""
         self._repo = repo
-        self.tipnode = tipnode
+        if tipnode is None:
+            self.tipnode = repo.nullid
+        else:
+            self.tipnode = tipnode
         self.tiprev = tiprev
         self.filteredhash = filteredhash
         # closednodes is a set of nodes that close their branch. If the branch
@@ -536,7 +538,7 @@ class branchcache(object):

         if not self.validfor(repo):
             # cache key are not valid anymore
-            self.tipnode = nullid
+            self.tipnode = repo.nullid
             self.tiprev = nullrev
             for heads in self.iterheads():
                 tiprev = max(cl.rev(node) for node in heads)

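
tipnode=nullid cannot survive as a default argument: defaults are evaluated once, at function definition, before any repository (and hence any repo.nullid) exists. The None sentinel resolved in the body is the standard fix, and the same idiom reappears in the setparents definitions further down. A generic sketch of the idiom (names are illustrative):

    class cache(object):
        def __init__(self, repo, tipnode=None):
            # Resolve the per-repository constant at call time, not at
            # import time, since it depends on the repo's hash function.
            if tipnode is None:
                tipnode = repo.nullid
            self.tipnode = tipnode
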
--- a/mercurial/bundle2.py
+++ b/mercurial/bundle2.py
@@ -158,7 +158,6 @@ import sys
 from .i18n import _
 from .node import (
     hex,
-    nullid,
     short,
 )
 from . import (
@@ -2576,7 +2575,7 @@ def widen_bundle(
         fullnodes=commonnodes,
     )
     cgdata = packer.generate(
-        {nullid},
+        {repo.nullid},
         list(commonnodes),
         False,
         b'narrow_widen',

--- a/mercurial/bundlerepo.py
+++ b/mercurial/bundlerepo.py
@@ -19,7 +19,6 @@ import shutil
 from .i18n import _
 from .node import (
     hex,
-    nullid,
     nullrev,
 )

@@ -447,7 +446,9 @@ class bundlerepository(object):
         return encoding.getcwd()  # always outside the repo

     # Check if parents exist in localrepo before setting
-    def setparents(self, p1, p2=nullid):
+    def setparents(self, p1, p2=None):
+        if p2 is None:
+            p2 = self.nullid
         p1rev = self.changelog.rev(p1)
         p2rev = self.changelog.rev(p2)
         msg = _(b"setting parent to node %s that only exists in the bundle\n")

--- a/mercurial/changegroup.py
+++ b/mercurial/changegroup.py
@@ -15,7 +15,6 @@ import weakref
 from .i18n import _
 from .node import (
     hex,
-    nullid,
     nullrev,
     short,
 )
@@ -673,7 +672,7 @@ def _revisiondeltatochunks(repo, delta,

     if delta.delta is not None:
         prefix, data = b'', delta.delta
-    elif delta.basenode == nullid:
+    elif delta.basenode == repo.nullid:
         data = delta.revision
         prefix = mdiff.trivialdiffheader(len(data))
     else:

--- a/mercurial/changelog.py
+++ b/mercurial/changelog.py
@@ -11,7 +11,6 @@ from .i18n import _
 from .node import (
     bin,
     hex,
-    nullid,
 )
 from .thirdparty import attr

@@ -221,7 +220,7 @@ class changelogrevision(object):

     def __new__(cls, cl, text, sidedata, cpsd):
         if not text:
-            return _changelogrevision(extra=_defaultextra, manifest=nullid)
+            return _changelogrevision(extra=_defaultextra, manifest=cl.nullid)

         self = super(changelogrevision, cls).__new__(cls)
         # We could return here and implement the following as an __init__.

--- a/mercurial/cmdutil.py
+++ b/mercurial/cmdutil.py
@@ -15,7 +15,6 @@ import re
 from .i18n import _
 from .node import (
     hex,
-    nullid,
     nullrev,
     short,
 )
@@ -1097,7 +1096,7 @@ def bailifchanged(repo, merge=True, hint
     'hint' is the usual hint given to Abort exception.
     """

    if merge and repo.dirstate.p2() != repo.nullid:
-    if merge and repo.dirstate.p2() != nullid:
+    if merge and repo.dirstate.p2() != repo.nullid:
         raise error.StateError(_(b'outstanding uncommitted merge'), hint=hint)
     st = repo.status()
     if st.modified or st.added or st.removed or st.deleted:
@@ -2104,7 +2103,7 @@ def _exportsingle(repo, ctx, fm, match,
     if parents:
         prev = parents[0]
     else:
-        prev = nullid
+        prev = repo.nullid

     fm.context(ctx=ctx)
     fm.plain(b'# HG changeset patch\n')
@@ -2967,7 +2966,7 @@ def amend(ui, repo, old, extra, pats, op
             ms.reset()

         # Reroute the working copy parent to the new changeset
-        repo.setparents(newid, nullid)
+        repo.setparents(newid, repo.nullid)

         # Fixing the dirstate because localrepo.commitctx does not update
         # it. This is rather convenient because we did not need to update
@@ -3322,7 +3321,7 @@ def revert(ui, repo, ctx, *pats, **opts)

         # in case of merge, files that are actually added can be reported as
         # modified, we need to post process the result
-        if p2 != nullid:
+        if p2 != repo.nullid:
             mergeadd = set(dsmodified)
             for path in dsmodified:
                 if path in mf:
@@ -3593,7 +3592,7 @@ def _performrevert(
             # We're reverting to our parent. If possible, we'd like status
             # to report the file as clean. We have to use normallookup for
             # merges to avoid losing information about merged/dirty files.
-            if p2 != nullid:
+            if p2 != repo.nullid:
                 normal = repo.dirstate.normallookup
             else:
                 normal = repo.dirstate.normal
@@ -3690,7 +3689,7 @@ def _performrevert(
             repo.dirstate.add(f)

     normal = repo.dirstate.normallookup
-    if node == parent and p2 == nullid:
+    if node == parent and p2 == repo.nullid:
         normal = repo.dirstate.normal
     for f in actions[b'undelete'][0]:
         if interactive:

--- a/mercurial/commands.py
+++ b/mercurial/commands.py
@@ -15,10 +15,8 @@ import sys
 from .i18n import _
 from .node import (
     hex,
-    nullid,
     nullrev,
     short,
-    wdirhex,
     wdirrev,
 )
 from .pycompat import open
@@ -486,7 +484,7 @@ def annotate(ui, repo, *pats, **opts):
             return b'%d ' % rev

         def formathex(h):
-            if h == wdirhex:
+            if h == repo.nodeconstants.wdirhex:
                 return b'%s+' % shorthex(hex(ctx.p1().node()))
             else:
                 return b'%s ' % shorthex(h)
@@ -809,9 +807,9 @@ def _dobackout(ui, repo, node=None, rev=
         )

     p1, p2 = repo.changelog.parents(node)
-    if p1 == nullid:
+    if p1 == repo.nullid:
         raise error.InputError(_(b'cannot backout a change with no parents'))
-    if p2 != nullid:
+    if p2 != repo.nullid:
         if not opts.get(b'parent'):
             raise error.InputError(_(b'cannot backout a merge changeset'))
         p = repo.lookup(opts[b'parent'])
@@ -1085,7 +1083,7 @@ def bisect(
         )
     else:
         node, p2 = repo.dirstate.parents()
-        if p2 != nullid:
+        if p2 != repo.nullid:
             raise error.StateError(_(b'current bisect revision is a merge'))
     if rev:
         if not nodes:
@@ -4847,7 +4845,7 @@ def merge(ui, repo, node=None, **opts):

     opts = pycompat.byteskwargs(opts)
     abort = opts.get(b'abort')
-    if abort and repo.dirstate.p2() == nullid:
+    if abort and repo.dirstate.p2() == repo.nullid:
         cmdutil.wrongtooltocontinue(repo, _(b'merge'))
     cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview'])
     if abort:
@@ -5072,7 +5070,7 @@ def parents(ui, repo, file_=None, **opts

     displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
     for n in p:
-        if n != nullid:
+        if n != repo.nullid:
             displayer.show(repo[n])
     displayer.close()

@@ -6105,7 +6103,7 @@ def resolve(ui, repo, *pats, **opts):
     with repo.wlock():
         ms = mergestatemod.mergestate.read(repo)

-        if not (ms.active() or repo.dirstate.p2() != nullid):
+        if not (ms.active() or repo.dirstate.p2() != repo.nullid):
             raise error.StateError(
                 _(b'resolve command not applicable when not merging')
             )
@@ -6223,7 +6221,7 @@ def resolve(ui, repo, *pats, **opts):
                     raise

         ms.commit()
-        branchmerge = repo.dirstate.p2() != nullid
+        branchmerge = repo.dirstate.p2() != repo.nullid
         mergestatemod.recordupdates(repo, ms.actions(), branchmerge, None)

     if not didwork and pats:
@@ -6315,7 +6313,7 @@ def revert(ui, repo, *pats, **opts):
         opts[b"rev"] = cmdutil.finddate(ui, repo, opts[b"date"])

     parent, p2 = repo.dirstate.parents()
-    if not opts.get(b'rev') and p2 != nullid:
+    if not opts.get(b'rev') and p2 != repo.nullid:
         # revert after merge is a trap for new users (issue2915)
         raise error.InputError(
             _(b'uncommitted merge with no revision specified'),
@@ -6335,7 +6333,7 @@ def revert(ui, repo, *pats, **opts):
         or opts.get(b'interactive')
     ):
         msg = _(b"no files or directories specified")
-        if p2 != nullid:
+        if p2 != repo.nullid:
             hint = _(
                 b"uncommitted merge, use --all to discard all changes,"
                 b" or 'hg update -C .' to abort the merge"
@@ -7396,7 +7394,7 @@ def tag(ui, repo, name1, *names, **opts)
         for n in names:
             if repo.tagtype(n) == b'global':
                 alltags = tagsmod.findglobaltags(ui, repo)
-                if alltags[n][0] == nullid:
+                if alltags[n][0] == repo.nullid:
                     raise error.InputError(
                         _(b"tag '%s' is already removed") % n
                     )
@@ -7423,7 +7421,7 @@ def tag(ui, repo, name1, *names, **opts)
             )
         if not opts.get(b'local'):
             p1, p2 = repo.dirstate.parents()
-            if p2 != nullid:
+            if p2 != repo.nullid:
                 raise error.StateError(_(b'uncommitted merge'))
             bheads = repo.branchheads()
             if not opts.get(b'force') and bheads and p1 not in bheads:

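
wdirhex follows the same route as nullid, moving onto repo.nodeconstants. For SHA-1 repositories the values are unchanged; the following only restates the documented SHA-1 constants:

    from mercurial.node import sha1nodeconstants

    # The working-directory pseudo-identifier is still all f's:
    assert sha1nodeconstants.wdirhex == b'f' * 40
    assert sha1nodeconstants.wdirid == b'\xff' * 20
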
--- a/mercurial/commit.py
+++ b/mercurial/commit.py
@@ -10,7 +10,6 @@ import errno
 from .i18n import _
 from .node import (
     hex,
-    nullid,
     nullrev,
 )

@@ -277,10 +276,10 @@ def _filecommit(
     """

     fname = fctx.path()
-    fparent1 = manifest1.get(fname, nullid)
-    fparent2 = manifest2.get(fname, nullid)
+    fparent1 = manifest1.get(fname, repo.nullid)
+    fparent2 = manifest2.get(fname, repo.nullid)
     touched = None
-    if fparent1 == fparent2 == nullid:
+    if fparent1 == fparent2 == repo.nullid:
         touched = 'added'

     if isinstance(fctx, context.filectx):
@@ -291,9 +290,11 @@ def _filecommit(
         if node in [fparent1, fparent2]:
             repo.ui.debug(b'reusing %s filelog entry\n' % fname)
             if (
-                fparent1 != nullid and manifest1.flags(fname) != fctx.flags()
+                fparent1 != repo.nullid
+                and manifest1.flags(fname) != fctx.flags()
             ) or (
-                fparent2 != nullid and manifest2.flags(fname) != fctx.flags()
+                fparent2 != repo.nullid
+                and manifest2.flags(fname) != fctx.flags()
             ):
                 touched = 'modified'
             return node, touched
@@ -327,7 +328,9 @@ def _filecommit(
             newfparent = fparent2

         if manifest2:  # branch merge
-            if fparent2 == nullid or cnode is None:  # copied on remote side
+            if (
+                fparent2 == repo.nullid or cnode is None
+            ):  # copied on remote side
                 if cfname in manifest2:
                     cnode = manifest2[cfname]
                     newfparent = fparent1
@@ -346,7 +349,7 @@ def _filecommit(
             if includecopymeta:
                 meta[b"copy"] = cfname
                 meta[b"copyrev"] = hex(cnode)
-            fparent1, fparent2 = nullid, newfparent
+            fparent1, fparent2 = repo.nullid, newfparent
         else:
             repo.ui.warn(
                 _(
@@ -356,20 +359,20 @@ def _filecommit(
                 % (fname, cfname)
             )

-    elif fparent1 == nullid:
-        fparent1, fparent2 = fparent2, nullid
-    elif fparent2 != nullid:
+    elif fparent1 == repo.nullid:
+        fparent1, fparent2 = fparent2, repo.nullid
+    elif fparent2 != repo.nullid:
         if ms.active() and ms.extras(fname).get(b'filenode-source') == b'other':
-            fparent1, fparent2 = fparent2, nullid
+            fparent1, fparent2 = fparent2, repo.nullid
         elif ms.active() and ms.extras(fname).get(b'merged') != b'yes':
-            fparent1, fparent2 = fparent1, nullid
+            fparent1, fparent2 = fparent1, repo.nullid
         # is one parent an ancestor of the other?
         else:
             fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
             if fparent1 in fparentancestors:
-                fparent1, fparent2 = fparent2, nullid
+                fparent1, fparent2 = fparent2, repo.nullid
             elif fparent2 in fparentancestors:
-                fparent2 = nullid
+                fparent2 = repo.nullid

     force_new_node = False
     # The file might have been deleted by merge code and user explicitly choose
@@ -384,9 +387,14 @@ def _filecommit(
         force_new_node = True
     # is the file changed?
     text = fctx.data()
-    if fparent2 != nullid or meta or flog.cmp(fparent1, text) or force_new_node:
+    if (
+        fparent2 != repo.nullid
+        or meta
+        or flog.cmp(fparent1, text)
+        or force_new_node
+    ):
         if touched is None:  # do not overwrite added
-            if fparent2 == nullid:
+            if fparent2 == repo.nullid:
                 touched = 'modified'
             else:
                 touched = 'merged'

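
The _filecommit hunks reformat several conditions to fit the longer names but keep the parent-normalization rules intact: a lone second parent is promoted to first, and a parent that is an ancestor of the other is dropped. A condensed restatement under simplifying assumptions (the real code also consults merge state and uses commonancestorsheads rather than a plain ancestor test):

    def normalize_fileparents(nullid, p1, p2, is_ancestor):
        # Mirrors the fallthrough rules in the hunks above.
        if p1 == nullid:
            return p2, nullid        # promote the only real parent
        if p2 != nullid:
            if is_ancestor(p1, p2):
                return p2, nullid    # p1 contributes no extra history
            if is_ancestor(p2, p1):
                return p1, nullid    # p2 contributes no extra history
        return p1, p2
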
@@ -14,14 +14,9 b' import stat' | |||||
14 |
|
14 | |||
15 | from .i18n import _ |
|
15 | from .i18n import _ | |
16 | from .node import ( |
|
16 | from .node import ( | |
17 | addednodeid, |
|
|||
18 | hex, |
|
17 | hex, | |
19 | modifiednodeid, |
|
|||
20 | nullid, |
|
|||
21 | nullrev, |
|
18 | nullrev, | |
22 | short, |
|
19 | short, | |
23 | wdirfilenodeids, |
|
|||
24 | wdirhex, |
|
|||
25 | ) |
|
20 | ) | |
26 | from .pycompat import ( |
|
21 | from .pycompat import ( | |
27 | getattr, |
|
22 | getattr, | |
@@ -140,7 +135,7 b' class basectx(object):' | |||||
140 | removed.append(fn) |
|
135 | removed.append(fn) | |
141 | elif flag1 != flag2: |
|
136 | elif flag1 != flag2: | |
142 | modified.append(fn) |
|
137 | modified.append(fn) | |
143 | elif node2 not in wdirfilenodeids: |
|
138 | elif node2 not in self._repo.nodeconstants.wdirfilenodeids: | |
144 | # When comparing files between two commits, we save time by |
|
139 | # When comparing files between two commits, we save time by | |
145 | # not comparing the file contents when the nodeids differ. |
|
140 | # not comparing the file contents when the nodeids differ. | |
146 | # Note that this means we incorrectly report a reverted change |
|
141 | # Note that this means we incorrectly report a reverted change | |
@@ -737,7 +732,7 b' class changectx(basectx):' | |||||
737 | n2 = c2._parents[0]._node |
|
732 | n2 = c2._parents[0]._node | |
738 | cahs = self._repo.changelog.commonancestorsheads(self._node, n2) |
|
733 | cahs = self._repo.changelog.commonancestorsheads(self._node, n2) | |
739 | if not cahs: |
|
734 | if not cahs: | |
740 | anc = nullid |
|
735 | anc = self._repo.nodeconstants.nullid | |
741 | elif len(cahs) == 1: |
|
736 | elif len(cahs) == 1: | |
742 | anc = cahs[0] |
|
737 | anc = cahs[0] | |
743 | else: |
|
738 | else: | |
@@ -1132,7 +1127,11 b' class basefilectx(object):' | |||||
1132 | _path = self._path |
|
1127 | _path = self._path | |
1133 | fl = self._filelog |
|
1128 | fl = self._filelog | |
1134 | parents = self._filelog.parents(self._filenode) |
|
1129 | parents = self._filelog.parents(self._filenode) | |
1135 | pl = [(_path, node, fl) for node in parents if node != nullid] |
|
1130 | pl = [ | |
|
1131 | (_path, node, fl) | |||
|
1132 | for node in parents | |||
|
1133 | if node != self._repo.nodeconstants.nullid | |||
|
1134 | ] | |||
1136 |
|
1135 | |||
1137 | r = fl.renamed(self._filenode) |
|
1136 | r = fl.renamed(self._filenode) | |
1138 | if r: |
|
1137 | if r: | |
@@ -1556,12 +1555,12 b' class workingctx(committablectx):' | |||||
1556 | return self._repo.dirstate[key] not in b"?r" |
|
1555 | return self._repo.dirstate[key] not in b"?r" | |
1557 |
|
1556 | |||
1558 | def hex(self): |
|
1557 | def hex(self): | |
1559 | return wdirhex |
|
1558 | return self._repo.nodeconstants.wdirhex | |
1560 |
|
1559 | |||
1561 | @propertycache |
|
1560 | @propertycache | |
1562 | def _parents(self): |
|
1561 | def _parents(self): | |
1563 | p = self._repo.dirstate.parents() |
|
1562 | p = self._repo.dirstate.parents() | |
1564 | if p[1] == nullid: |
|
1563 | if p[1] == self._repo.nodeconstants.nullid: | |
1565 | p = p[:-1] |
|
1564 | p = p[:-1] | |
1566 | # use unfiltered repo to delay/avoid loading obsmarkers |
|
1565 | # use unfiltered repo to delay/avoid loading obsmarkers | |
1567 | unfi = self._repo.unfiltered() |
|
1566 | unfi = self._repo.unfiltered() | |
@@ -1572,7 +1571,9 b' class workingctx(committablectx):' | |||||
1572 | for n in p |
|
1571 | for n in p | |
1573 | ] |
|
1572 | ] | |
1574 |
|
1573 | |||
1575 | def setparents(self, p1node, p2node=nullid): |
|
1574 | def setparents(self, p1node, p2node=None): | |
|
1575 | if p2node is None: | |||
|
1576 | p2node = self._repo.nodeconstants.nullid | |||
1576 | dirstate = self._repo.dirstate |
|
1577 | dirstate = self._repo.dirstate | |
1577 | with dirstate.parentchange(): |
|
1578 | with dirstate.parentchange(): | |
1578 | copies = dirstate.setparents(p1node, p2node) |
|
1579 | copies = dirstate.setparents(p1node, p2node) | |
@@ -1584,7 +1585,7 b' class workingctx(committablectx):' | |||||
1584 | for f in copies: |
|
1585 | for f in copies: | |
1585 | if f not in pctx and copies[f] in pctx: |
|
1586 | if f not in pctx and copies[f] in pctx: | |
1586 | dirstate.copy(copies[f], f) |
|
1587 | dirstate.copy(copies[f], f) | |
1587 | if p2node == nullid: |
|
1588 | if p2node == self._repo.nodeconstants.nullid: | |
1588 | for f, s in sorted(dirstate.copies().items()): |
|
1589 | for f, s in sorted(dirstate.copies().items()): | |
1589 | if f not in pctx and s not in pctx: |
|
1590 | if f not in pctx and s not in pctx: | |
1590 | dirstate.copy(None, f) |
|
1591 | dirstate.copy(None, f) | |
@@ -1944,8 +1945,8 b' class workingctx(committablectx):' | |||||
1944 |
|
1945 | |||
1945 | ff = self._flagfunc |
|
1946 | ff = self._flagfunc | |
1946 | for i, l in ( |
|
1947 | for i, l in ( | |
1947 | (addednodeid, status.added), |
|
1948 | (self._repo.nodeconstants.addednodeid, status.added), | |
1948 | (modifiednodeid, status.modified), |
|
1949 | (self._repo.nodeconstants.modifiednodeid, status.modified), | |
1949 | ): |
|
1950 | ): | |
1950 | for f in l: |
|
1951 | for f in l: | |
1951 | man[f] = i |
|
1952 | man[f] = i | |
@@ -2070,13 +2071,18 b' class committablefilectx(basefilectx):' | |||||
2070 | path = self.copysource() |
|
2071 | path = self.copysource() | |
2071 | if not path: |
|
2072 | if not path: | |
2072 | return None |
|
2073 | return None | |
2073 | return path, self._changectx._parents[0]._manifest.get(path, nullid) |
|
2074 | return ( | |
|
2075 | path, | |||
|
2076 | self._changectx._parents[0]._manifest.get( | |||
|
2077 | path, self._repo.nodeconstants.nullid | |||
|
2078 | ), | |||
|
2079 | ) | |||
2074 |
|
2080 | |||
2075 | def parents(self): |
|
2081 | def parents(self): | |
2076 | '''return parent filectxs, following copies if necessary''' |
|
2082 | '''return parent filectxs, following copies if necessary''' | |
2077 |
|
2083 | |||
2078 | def filenode(ctx, path): |
|
2084 | def filenode(ctx, path): | |
2079 | return ctx._manifest.get(path, nullid) |
|
2085 | return ctx._manifest.get(path, self._repo.nodeconstants.nullid) | |
2080 |
|
2086 | |||
2081 | path = self._path |
|
2087 | path = self._path | |
2082 | fl = self._filelog |
|
2088 | fl = self._filelog | |
@@ -2094,7 +2100,7 b' class committablefilectx(basefilectx):' | |||||
2094 | return [ |
|
2100 | return [ | |
2095 | self._parentfilectx(p, fileid=n, filelog=l) |
|
2101 | self._parentfilectx(p, fileid=n, filelog=l) | |
2096 | for p, n, l in pl |
|
2102 | for p, n, l in pl | |
2097 | if n != nullid |
|
2103 | if n != self._repo.nodeconstants.nullid | |
2098 | ] |
|
2104 | ] | |
2099 |
|
2105 | |||
2100 | def children(self): |
|
2106 | def children(self): | |
@@ -2222,7 +2228,9 b' class overlayworkingctx(committablectx):' | |||||
2222 | # ``overlayworkingctx`` (e.g. with --collapse). |
|
2228 | # ``overlayworkingctx`` (e.g. with --collapse). | |
2223 | util.clearcachedproperty(self, b'_manifest') |
|
2229 | util.clearcachedproperty(self, b'_manifest') | |
2224 |
|
2230 | |||
2225 | def setparents(self, p1node, p2node=nullid): |
|
2231 | def setparents(self, p1node, p2node=None): | |
|
2232 | if p2node is None: | |||
|
2233 | p2node = self._repo.nodeconstants.nullid | |||
2226 | assert p1node == self._wrappedctx.node() |
|
2234 | assert p1node == self._wrappedctx.node() | |
2227 | self._parents = [self._wrappedctx, self._repo.unfiltered()[p2node]] |
|
2235 | self._parents = [self._wrappedctx, self._repo.unfiltered()[p2node]] | |
2228 |
|
2236 | |||
@@ -2248,10 +2256,10 b' class overlayworkingctx(committablectx):' | |||||
2248 |
|
2256 | |||
2249 | flag = self._flagfunc |
|
2257 | flag = self._flagfunc | |
2250 | for path in self.added(): |
|
2258 | for path in self.added(): | |
2251 | man[path] = addednodeid |
|
2259 | man[path] = self._repo.nodeconstants.addednodeid | |
2252 | man.setflag(path, flag(path)) |
|
2260 | man.setflag(path, flag(path)) | |
2253 | for path in self.modified(): |
|
2261 | for path in self.modified(): | |
2254 | man[path] = modifiednodeid |
|
2262 | man[path] = self._repo.nodeconstants.modifiednodeid | |
2255 | man.setflag(path, flag(path)) |
|
2263 | man.setflag(path, flag(path)) | |
2256 | for path in self.removed(): |
|
2264 | for path in self.removed(): | |
2257 | del man[path] |
|
2265 | del man[path] | |
@@ -2827,7 +2835,7 b' class memctx(committablectx):' | |||||
2827 | ) |
|
2835 | ) | |
2828 | self._rev = None |
|
2836 | self._rev = None | |
2829 | self._node = None |
|
2837 | self._node = None | |
2830 | parents = [(p or nullid) for p in parents] |
|
2838 | parents = [(p or self._repo.nodeconstants.nullid) for p in parents] | |
2831 | p1, p2 = parents |
|
2839 | p1, p2 = parents | |
2832 | self._parents = [self._repo[p] for p in (p1, p2)] |
|
2840 | self._parents = [self._repo[p] for p in (p1, p2)] | |
2833 | files = sorted(set(files)) |
|
2841 | files = sorted(set(files)) | |
@@ -2866,10 +2874,10 b' class memctx(committablectx):' | |||||
2866 | man = pctx.manifest().copy() |
|
2874 | man = pctx.manifest().copy() | |
2867 |
|
2875 | |||
2868 | for f in self._status.modified: |
|
2876 | for f in self._status.modified: | |
2869 | man[f] = modifiednodeid |
|
2877 | man[f] = self._repo.nodeconstants.modifiednodeid | |
2870 |
|
2878 | |||
2871 | for f in self._status.added: |
|
2879 | for f in self._status.added: | |
2872 | man[f] = addednodeid |
|
2880 | man[f] = self._repo.nodeconstants.addednodeid | |
2873 |
|
2881 | |||
2874 | for f in self._status.removed: |
|
2882 | for f in self._status.removed: | |
2875 | if f in man: |
|
2883 | if f in man: | |
@@ -3006,12 +3014,12 b' class metadataonlyctx(committablectx):' | |||||
3006 | # sanity check to ensure that the reused manifest parents are |
|
3014 | # sanity check to ensure that the reused manifest parents are | |
3007 | # manifests of our commit parents |
|
3015 | # manifests of our commit parents | |
3008 | mp1, mp2 = self.manifestctx().parents |
|
3016 | mp1, mp2 = self.manifestctx().parents | |
3009 | if p1 != nullid and p1.manifestnode() != mp1: |
|
3017 | if p1 != self._repo.nodeconstants.nullid and p1.manifestnode() != mp1: | |
3010 | raise RuntimeError( |
|
3018 | raise RuntimeError( | |
3011 | r"can't reuse the manifest: its p1 " |
|
3019 | r"can't reuse the manifest: its p1 " | |
3012 | r"doesn't match the new ctx p1" |
|
3020 | r"doesn't match the new ctx p1" | |
3013 | ) |
|
3021 | ) | |
3014 | if p2 != nullid and p2.manifestnode() != mp2: |
|
3022 | if p2 != self._repo.nodeconstants.nullid and p2.manifestnode() != mp2: | |
3015 | raise RuntimeError( |
|
3023 | raise RuntimeError( | |
3016 | r"can't reuse the manifest: " |
|
3024 | r"can't reuse the manifest: " | |
3017 | r"its p2 doesn't match the new ctx p2" |
|
3025 | r"its p2 doesn't match the new ctx p2" |
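For context on the `addednodeid`/`modifiednodeid` rewrites in this file: those values are fixed placeholder hashes, not real revisions, that a working-copy manifest uses for files with no committed node yet. A rough sketch under that assumption (simplified; the real manifest is not a plain dict):

    # Sketch: overlay working-copy status onto a committed manifest using
    # the placeholder node ids; the byte values below match what
    # mercurial/node.py has historically defined.

    ADDEDNODEID = b"a" * 20                   # file added in the working copy
    MODIFIEDNODEID = b"modified" + b" " * 12  # file modified in the working copy

    def overlay_status(base_manifest, added, modified, removed):
        """Return a copy of base_manifest with working-copy status applied."""
        man = dict(base_manifest)
        for f in added:
            man[f] = ADDEDNODEID
        for f in modified:
            man[f] = MODIFIEDNODEID
        for f in removed:
            man.pop(f, None)
        return man

    base = {b"a.txt": b"\x11" * 20}
    print(overlay_status(base, [b"new.txt"], [b"a.txt"], []))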
@@ -12,10 +12,7 b' import collections' | |||||
12 | import os |
|
12 | import os | |
13 |
|
13 | |||
14 | from .i18n import _ |
|
14 | from .i18n import _ | |
15 | from .node import ( |
|
15 | from .node import nullrev | |
16 | nullid, |
|
|||
17 | nullrev, |
|
|||
18 | ) |
|
|||
19 |
|
16 | |||
20 | from . import ( |
|
17 | from . import ( | |
21 | match as matchmod, |
|
18 | match as matchmod, | |
@@ -579,7 +576,7 b' def _revinfo_getter_extra(repo):' | |||||
579 | parents = fctx._filelog.parents(fctx._filenode) |
|
576 | parents = fctx._filelog.parents(fctx._filenode) | |
580 | nb_parents = 0 |
|
577 | nb_parents = 0 | |
581 | for n in parents: |
|
578 | for n in parents: | |
582 | if n != nullid: |
|
579 | if n != repo.nullid: | |
583 | nb_parents += 1 |
|
580 | nb_parents += 1 | |
584 | return nb_parents >= 2 |
|
581 | return nb_parents >= 2 | |
585 |
|
582 |
@@ -30,7 +30,6 b' from .i18n import _' | |||||
30 | from .node import ( |
|
30 | from .node import ( | |
31 | bin, |
|
31 | bin, | |
32 | hex, |
|
32 | hex, | |
33 | nullid, |
|
|||
34 | nullrev, |
|
33 | nullrev, | |
35 | short, |
|
34 | short, | |
36 | ) |
|
35 | ) | |
@@ -1667,7 +1666,7 b' def debugindexdot(ui, repo, file_=None, ' | |||||
1667 | node = r.node(i) |
|
1666 | node = r.node(i) | |
1668 | pp = r.parents(node) |
|
1667 | pp = r.parents(node) | |
1669 | ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i)) |
|
1668 | ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i)) | |
1670 | if pp[1] != nullid: |
|
1669 | if pp[1] != repo.nullid: | |
1671 | ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i)) |
|
1670 | ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i)) | |
1672 | ui.write(b"}\n") |
|
1671 | ui.write(b"}\n") | |
1673 |
|
1672 | |||
@@ -1675,7 +1674,7 b' def debugindexdot(ui, repo, file_=None, ' | |||||
1675 | @command(b'debugindexstats', []) |
|
1674 | @command(b'debugindexstats', []) | |
1676 | def debugindexstats(ui, repo): |
|
1675 | def debugindexstats(ui, repo): | |
1677 | """show stats related to the changelog index""" |
|
1676 | """show stats related to the changelog index""" | |
1678 | repo.changelog.shortest(nullid, 1) |
|
1677 | repo.changelog.shortest(repo.nullid, 1) | |
1679 | index = repo.changelog.index |
|
1678 | index = repo.changelog.index | |
1680 | if not util.safehasattr(index, b'stats'): |
|
1679 | if not util.safehasattr(index, b'stats'): | |
1681 | raise error.Abort(_(b'debugindexstats only works with native code')) |
|
1680 | raise error.Abort(_(b'debugindexstats only works with native code')) | |
@@ -2425,7 +2424,7 b' def debugobsolete(ui, repo, precursor=No' | |||||
2425 | # arbitrary node identifiers, possibly not present in the |
|
2424 | # arbitrary node identifiers, possibly not present in the | |
2426 | # local repository. |
|
2425 | # local repository. | |
2427 | n = bin(s) |
|
2426 | n = bin(s) | |
2428 | if len(n) != len(nullid): |
|
2427 | if len(n) != repo.nodeconstants.nodelen: | |
2429 | raise TypeError() |
|
2428 | raise TypeError() | |
2430 | return n |
|
2429 | return n | |
2431 | except TypeError: |
|
2430 | except TypeError: | |
@@ -3328,7 +3327,7 b' def debugrevlogindex(ui, repo, file_=Non' | |||||
3328 | try: |
|
3327 | try: | |
3329 | pp = r.parents(node) |
|
3328 | pp = r.parents(node) | |
3330 | except Exception: |
|
3329 | except Exception: | |
3331 | pp = [nullid, nullid] |
|
3330 | pp = [repo.nullid, repo.nullid] | |
3332 | if ui.verbose: |
|
3331 | if ui.verbose: | |
3333 | ui.write( |
|
3332 | ui.write( | |
3334 | b"% 6d % 9d % 7d % 7d %s %s %s\n" |
|
3333 | b"% 6d % 9d % 7d % 7d %s %s %s\n" | |
@@ -3742,7 +3741,9 b' def debugbackupbundle(ui, repo, *pats, *' | |||||
3742 | for n in chlist: |
|
3741 | for n in chlist: | |
3743 | if limit is not None and count >= limit: |
|
3742 | if limit is not None and count >= limit: | |
3744 | break |
|
3743 | break | |
3745 | parents = [True for p in other.changelog.parents(n) if p != nullid] |
|
3744 | parents = [ | |
|
3745 | True for p in other.changelog.parents(n) if p != repo.nullid | |||
|
3746 | ] | |||
3746 | if opts.get(b"no_merges") and len(parents) == 2: |
|
3747 | if opts.get(b"no_merges") and len(parents) == 2: | |
3747 | continue |
|
3748 | continue | |
3748 | count += 1 |
|
3749 | count += 1 |
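The `parsenodeid` hunk above replaces a length check against the null id with the repo's `nodeconstants.nodelen`, so the validation keeps working for any future hash width. A self-contained sketch of the same check, with `binascii` standing in for Mercurial's `bin()`:

    import binascii

    NODELEN = 20  # sha1nodeconstants.nodelen for SHA-1 repos

    def parse_node_id(s):
        """Decode a hex node id, insisting on the repo's hash width."""
        n = binascii.unhexlify(s)
        if len(n) != NODELEN:
            raise TypeError("expected %d bytes, got %d" % (NODELEN, len(n)))
        return n

    print(parse_node_id(b"00" * 20) == b"\x00" * 20)  # True
    try:
        parse_node_id(b"0011")
    except TypeError as e:
        print(e)  # expected 20 bytes, got 2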
@@ -14,7 +14,6 b' import os' | |||||
14 | import stat |
|
14 | import stat | |
15 |
|
15 | |||
16 | from .i18n import _ |
|
16 | from .i18n import _ | |
17 | from .node import nullid |
|
|||
18 | from .pycompat import delattr |
|
17 | from .pycompat import delattr | |
19 |
|
18 | |||
20 | from hgdemandimport import tracing |
|
19 | from hgdemandimport import tracing | |
@@ -314,7 +313,7 b' class dirstate(object):' | |||||
314 | def branch(self): |
|
313 | def branch(self): | |
315 | return encoding.tolocal(self._branch) |
|
314 | return encoding.tolocal(self._branch) | |
316 |
|
315 | |||
317 | def setparents(self, p1, p2=nullid): |
|
316 | def setparents(self, p1, p2=None): | |
318 | """Set dirstate parents to p1 and p2. |
|
317 | """Set dirstate parents to p1 and p2. | |
319 |
|
318 | |||
320 | When moving from two parents to one, 'm' merged entries a |
|
319 | When moving from two parents to one, 'm' merged entries a | |
@@ -323,6 +322,8 b' class dirstate(object):' | |||||
323 |
|
322 | |||
324 | See localrepo.setparents() |
|
323 | See localrepo.setparents() | |
325 | """ |
|
324 | """ | |
|
325 | if p2 is None: | |||
|
326 | p2 = self._nodeconstants.nullid | |||
326 | if self._parentwriters == 0: |
|
327 | if self._parentwriters == 0: | |
327 | raise ValueError( |
|
328 | raise ValueError( | |
328 | b"cannot set dirstate parent outside of " |
|
329 | b"cannot set dirstate parent outside of " | |
@@ -335,7 +336,10 b' class dirstate(object):' | |||||
335 | self._origpl = self._pl |
|
336 | self._origpl = self._pl | |
336 | self._map.setparents(p1, p2) |
|
337 | self._map.setparents(p1, p2) | |
337 | copies = {} |
|
338 | copies = {} | |
338 | if oldp2 != nullid and p2 == nullid: |
|
339 | if ( | |
|
340 | oldp2 != self._nodeconstants.nullid | |||
|
341 | and p2 == self._nodeconstants.nullid | |||
|
342 | ): | |||
339 | candidatefiles = self._map.nonnormalset.union( |
|
343 | candidatefiles = self._map.nonnormalset.union( | |
340 | self._map.otherparentset |
|
344 | self._map.otherparentset | |
341 | ) |
|
345 | ) | |
@@ -459,7 +463,7 b' class dirstate(object):' | |||||
459 |
|
463 | |||
460 | def normallookup(self, f): |
|
464 | def normallookup(self, f): | |
461 | '''Mark a file normal, but possibly dirty.''' |
|
465 | '''Mark a file normal, but possibly dirty.''' | |
462 | if self._pl[1] != nullid: |
|
466 | if self._pl[1] != self._nodeconstants.nullid: | |
463 | # if there is a merge going on and the file was either |
|
467 | # if there is a merge going on and the file was either | |
464 | # in state 'm' (-1) or coming from other parent (-2) before |
|
468 | # in state 'm' (-1) or coming from other parent (-2) before | |
465 | # being removed, restore that state. |
|
469 | # being removed, restore that state. | |
@@ -481,7 +485,7 b' class dirstate(object):' | |||||
481 |
|
485 | |||
482 | def otherparent(self, f): |
|
486 | def otherparent(self, f): | |
483 | '''Mark as coming from the other parent, always dirty.''' |
|
487 | '''Mark as coming from the other parent, always dirty.''' | |
484 | if self._pl[1] == nullid: |
|
488 | if self._pl[1] == self._nodeconstants.nullid: | |
485 | raise error.Abort( |
|
489 | raise error.Abort( | |
486 | _(b"setting %r to other parent only allowed in merges") % f |
|
490 | _(b"setting %r to other parent only allowed in merges") % f | |
487 | ) |
|
491 | ) | |
@@ -503,7 +507,7 b' class dirstate(object):' | |||||
503 | self._dirty = True |
|
507 | self._dirty = True | |
504 | oldstate = self[f] |
|
508 | oldstate = self[f] | |
505 | size = 0 |
|
509 | size = 0 | |
506 | if self._pl[1] != nullid: |
|
510 | if self._pl[1] != self._nodeconstants.nullid: | |
507 | entry = self._map.get(f) |
|
511 | entry = self._map.get(f) | |
508 | if entry is not None: |
|
512 | if entry is not None: | |
509 | # backup the previous state |
|
513 | # backup the previous state | |
@@ -519,7 +523,7 b' class dirstate(object):' | |||||
519 |
|
523 | |||
520 | def merge(self, f): |
|
524 | def merge(self, f): | |
521 | '''Mark a file merged.''' |
|
525 | '''Mark a file merged.''' | |
522 | if self._pl[1] == nullid: |
|
526 | if self._pl[1] == self._nodeconstants.nullid: | |
523 | return self.normallookup(f) |
|
527 | return self.normallookup(f) | |
524 | return self.otherparent(f) |
|
528 | return self.otherparent(f) | |
525 |
|
529 | |||
@@ -638,7 +642,7 b' class dirstate(object):' | |||||
638 |
|
642 | |||
639 | if self._origpl is None: |
|
643 | if self._origpl is None: | |
640 | self._origpl = self._pl |
|
644 | self._origpl = self._pl | |
641 | self._map.setparents(parent, nullid) |
|
645 | self._map.setparents(parent, self._nodeconstants.nullid) | |
642 |
|
646 | |||
643 | for f in to_lookup: |
|
647 | for f in to_lookup: | |
644 | self.normallookup(f) |
|
648 | self.normallookup(f) | |
@@ -1459,7 +1463,7 b' class dirstatemap(object):' | |||||
1459 | def clear(self): |
|
1463 | def clear(self): | |
1460 | self._map.clear() |
|
1464 | self._map.clear() | |
1461 | self.copymap.clear() |
|
1465 | self.copymap.clear() | |
1462 | self.setparents(nullid, nullid) |
|
1466 | self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid) | |
1463 | util.clearcachedproperty(self, b"_dirs") |
|
1467 | util.clearcachedproperty(self, b"_dirs") | |
1464 | util.clearcachedproperty(self, b"_alldirs") |
|
1468 | util.clearcachedproperty(self, b"_alldirs") | |
1465 | util.clearcachedproperty(self, b"filefoldmap") |
|
1469 | util.clearcachedproperty(self, b"filefoldmap") | |
@@ -1636,7 +1640,10 b' class dirstatemap(object):' | |||||
1636 | st[self._nodelen : 2 * self._nodelen], |
|
1640 | st[self._nodelen : 2 * self._nodelen], | |
1637 | ) |
|
1641 | ) | |
1638 | elif l == 0: |
|
1642 | elif l == 0: | |
1639 | self._parents = (nullid, nullid) |
|
1643 | self._parents = ( | |
|
1644 | self._nodeconstants.nullid, | |||
|
1645 | self._nodeconstants.nullid, | |||
|
1646 | ) | |||
1640 | else: |
|
1647 | else: | |
1641 | raise error.Abort( |
|
1648 | raise error.Abort( | |
1642 | _(b'working directory state appears damaged!') |
|
1649 | _(b'working directory state appears damaged!') | |
@@ -1794,7 +1801,9 b' if rustmod is not None:' | |||||
1794 | def clear(self): |
|
1801 | def clear(self): | |
1795 | self._rustmap.clear() |
|
1802 | self._rustmap.clear() | |
1796 | self._inner_rustmap.clear() |
|
1803 | self._inner_rustmap.clear() | |
1797 | self.setparents(nullid, nullid) |
|
1804 | self.setparents( | |
|
1805 | self._nodeconstants.nullid, self._nodeconstants.nullid | |||
|
1806 | ) | |||
1798 | util.clearcachedproperty(self, b"_dirs") |
|
1807 | util.clearcachedproperty(self, b"_dirs") | |
1799 | util.clearcachedproperty(self, b"_alldirs") |
|
1808 | util.clearcachedproperty(self, b"_alldirs") | |
1800 | util.clearcachedproperty(self, b"dirfoldmap") |
|
1809 | util.clearcachedproperty(self, b"dirfoldmap") |
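Several `setparents` signatures in this file (and in context.py, localrepo.py, and the dirstate interface below) change their default from the module-level `nullid` to `None`, resolved inside the method: the null id is now a per-repo value, so it cannot be baked in at function-definition time. A hedged sketch of the pattern:

    # Sketch: a None sentinel resolved at call time replaces a default
    # argument that used to be the module constant nullid.

    class NodeConstants(object):
        nullid = b"\0" * 20  # assumed SHA-1 width

    class Dirstate(object):
        def __init__(self, nodeconstants):
            self._nodeconstants = nodeconstants
            self._parents = (nodeconstants.nullid, nodeconstants.nullid)

        def setparents(self, p1, p2=None):
            # Resolve the sentinel against this repo's notion of "null".
            if p2 is None:
                p2 = self._nodeconstants.nullid
            self._parents = (p1, p2)

    ds = Dirstate(NodeConstants())
    ds.setparents(b"\x01" * 20)
    print(ds._parents[1] == NodeConstants.nullid)  # True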
@@ -12,7 +12,6 b' import functools' | |||||
12 | from .i18n import _ |
|
12 | from .i18n import _ | |
13 | from .node import ( |
|
13 | from .node import ( | |
14 | hex, |
|
14 | hex, | |
15 | nullid, |
|
|||
16 | short, |
|
15 | short, | |
17 | ) |
|
16 | ) | |
18 |
|
17 | |||
@@ -107,7 +106,7 b' class outgoing(object):' | |||||
107 | if missingroots: |
|
106 | if missingroots: | |
108 | discbases = [] |
|
107 | discbases = [] | |
109 | for n in missingroots: |
|
108 | for n in missingroots: | |
110 | discbases.extend([p for p in cl.parents(n) if p != nullid]) |
|
109 | discbases.extend([p for p in cl.parents(n) if p != repo.nullid]) | |
111 | # TODO remove call to nodesbetween. |
|
110 | # TODO remove call to nodesbetween. | |
112 | # TODO populate attributes on outgoing instance instead of setting |
|
111 | # TODO populate attributes on outgoing instance instead of setting | |
113 | # discbases. |
|
112 | # discbases. | |
@@ -116,7 +115,7 b' class outgoing(object):' | |||||
116 | ancestorsof = heads |
|
115 | ancestorsof = heads | |
117 | commonheads = [n for n in discbases if n not in included] |
|
116 | commonheads = [n for n in discbases if n not in included] | |
118 | elif not commonheads: |
|
117 | elif not commonheads: | |
119 | commonheads = [nullid] |
|
118 | commonheads = [repo.nullid] | |
120 | self.commonheads = commonheads |
|
119 | self.commonheads = commonheads | |
121 | self.ancestorsof = ancestorsof |
|
120 | self.ancestorsof = ancestorsof | |
122 | self._revlog = cl |
|
121 | self._revlog = cl | |
@@ -381,7 +380,7 b' def checkheads(pushop):' | |||||
381 | # - a local outgoing head descended from update |
|
380 | # - a local outgoing head descended from update | |
382 | # - a remote head that's known locally and not |
|
381 | # - a remote head that's known locally and not | |
383 | # ancestral to an outgoing head |
|
382 | # ancestral to an outgoing head | |
384 | if remoteheads == [nullid]: |
|
383 | if remoteheads == [repo.nullid]: | |
385 | # remote is empty, nothing to check. |
|
384 | # remote is empty, nothing to check. | |
386 | return |
|
385 | return | |
387 |
|
386 |
@@ -13,7 +13,6 b' import weakref' | |||||
13 | from .i18n import _ |
|
13 | from .i18n import _ | |
14 | from .node import ( |
|
14 | from .node import ( | |
15 | hex, |
|
15 | hex, | |
16 | nullid, |
|
|||
17 | nullrev, |
|
16 | nullrev, | |
18 | ) |
|
17 | ) | |
19 | from . import ( |
|
18 | from . import ( | |
@@ -164,7 +163,7 b' def _computeoutgoing(repo, heads, common' | |||||
164 | hasnode = cl.hasnode |
|
163 | hasnode = cl.hasnode | |
165 | common = [n for n in common if hasnode(n)] |
|
164 | common = [n for n in common if hasnode(n)] | |
166 | else: |
|
165 | else: | |
167 | common = [nullid] |
|
166 | common = [repo.nullid] | |
168 | if not heads: |
|
167 | if not heads: | |
169 | heads = cl.heads() |
|
168 | heads = cl.heads() | |
170 | return discovery.outgoing(repo, common, heads) |
|
169 | return discovery.outgoing(repo, common, heads) | |
@@ -1839,7 +1838,7 b' def _pullbundle2(pullop):' | |||||
1839 | if ( |
|
1838 | if ( | |
1840 | pullop.remote.capable(b'clonebundles') |
|
1839 | pullop.remote.capable(b'clonebundles') | |
1841 | and pullop.heads is None |
|
1840 | and pullop.heads is None | |
1842 | and list(pullop.common) == [nullid] |
|
1841 | and list(pullop.common) == [pullop.repo.nullid] | |
1843 | ): |
|
1842 | ): | |
1844 | kwargs[b'cbattempted'] = pullop.clonebundleattempted |
|
1843 | kwargs[b'cbattempted'] = pullop.clonebundleattempted | |
1845 |
|
1844 | |||
@@ -1849,7 +1848,7 b' def _pullbundle2(pullop):' | |||||
1849 | pullop.repo.ui.status(_(b"no changes found\n")) |
|
1848 | pullop.repo.ui.status(_(b"no changes found\n")) | |
1850 | pullop.cgresult = 0 |
|
1849 | pullop.cgresult = 0 | |
1851 | else: |
|
1850 | else: | |
1852 | if pullop.heads is None and list(pullop.common) == [nullid]: |
|
1851 | if pullop.heads is None and list(pullop.common) == [pullop.repo.nullid]: | |
1853 | pullop.repo.ui.status(_(b"requesting all changes\n")) |
|
1852 | pullop.repo.ui.status(_(b"requesting all changes\n")) | |
1854 | if obsolete.isenabled(pullop.repo, obsolete.exchangeopt): |
|
1853 | if obsolete.isenabled(pullop.repo, obsolete.exchangeopt): | |
1855 | remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps) |
|
1854 | remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps) | |
@@ -1920,7 +1919,7 b' def _pullchangeset(pullop):' | |||||
1920 | pullop.cgresult = 0 |
|
1919 | pullop.cgresult = 0 | |
1921 | return |
|
1920 | return | |
1922 | tr = pullop.gettransaction() |
|
1921 | tr = pullop.gettransaction() | |
1923 | if pullop.heads is None and list(pullop.common) == [nullid]: |
|
1922 | if pullop.heads is None and list(pullop.common) == [pullop.repo.nullid]: | |
1924 | pullop.repo.ui.status(_(b"requesting all changes\n")) |
|
1923 | pullop.repo.ui.status(_(b"requesting all changes\n")) | |
1925 | elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'): |
|
1924 | elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'): | |
1926 | # issue1320, avoid a race if remote changed after discovery |
|
1925 | # issue1320, avoid a race if remote changed after discovery |
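In these pull paths, a `common` list holding only the null id is the discovery result for "no shared history", so the comparison doubles as a clone-from-scratch test. A small sketch of that test with illustrative names:

    # Sketch: after discovery, "common" holds the heads both sides share.
    # A lone null id means the remote shares nothing with us, i.e. the
    # pull will request all changes (when clone bundles may apply).

    NULLID = b"\0" * 20  # stands in for pullop.repo.nullid

    def is_full_pull(heads, common):
        """True when the pull would fetch the remote's entire history."""
        return heads is None and list(common) == [NULLID]

    print(is_full_pull(None, {NULLID}))        # True: fresh clone
    print(is_full_pull(None, {b"\x42" * 20}))  # False: shared history exists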
@@ -11,10 +11,7 b' import collections' | |||||
11 | import weakref |
|
11 | import weakref | |
12 |
|
12 | |||
13 | from .i18n import _ |
|
13 | from .i18n import _ | |
14 | from .node import ( |
|
14 | from .node import short | |
15 | nullid, |
|
|||
16 | short, |
|
|||
17 | ) |
|
|||
18 | from . import ( |
|
15 | from . import ( | |
19 | bookmarks, |
|
16 | bookmarks, | |
20 | error, |
|
17 | error, | |
@@ -304,7 +301,7 b' def _pullchangesetdiscovery(repo, remote' | |||||
304 | if set(remoteheads).issubset(common): |
|
301 | if set(remoteheads).issubset(common): | |
305 | fetch = [] |
|
302 | fetch = [] | |
306 |
|
303 | |||
307 | common.discard(nullid) |
|
304 | common.discard(repo.nullid) | |
308 |
|
305 | |||
309 | return common, fetch, remoteheads |
|
306 | return common, fetch, remoteheads | |
310 |
|
307 | |||
@@ -413,7 +410,7 b' def _processchangesetdata(repo, tr, objs' | |||||
413 | # Linknode is always itself for changesets. |
|
410 | # Linknode is always itself for changesets. | |
414 | cset[b'node'], |
|
411 | cset[b'node'], | |
415 | # We always send full revisions. So delta base is not set. |
|
412 | # We always send full revisions. So delta base is not set. | |
416 | nullid, |
|
413 | repo.nullid, | |
417 | mdiff.trivialdiffheader(len(data)) + data, |
|
414 | mdiff.trivialdiffheader(len(data)) + data, | |
418 | # Flags not yet supported. |
|
415 | # Flags not yet supported. | |
419 | 0, |
|
416 | 0, | |
@@ -478,7 +475,7 b' def _fetchmanifests(repo, tr, remote, ma' | |||||
478 | basenode = manifest[b'deltabasenode'] |
|
475 | basenode = manifest[b'deltabasenode'] | |
479 | delta = extrafields[b'delta'] |
|
476 | delta = extrafields[b'delta'] | |
480 | elif b'revision' in extrafields: |
|
477 | elif b'revision' in extrafields: | |
481 | basenode = nullid |
|
478 | basenode = repo.nullid | |
482 | revision = extrafields[b'revision'] |
|
479 | revision = extrafields[b'revision'] | |
483 | delta = mdiff.trivialdiffheader(len(revision)) + revision |
|
480 | delta = mdiff.trivialdiffheader(len(revision)) + revision | |
484 | else: |
|
481 | else: | |
@@ -610,7 +607,7 b' def _fetchfiles(repo, tr, remote, fnodes' | |||||
610 | basenode = filerevision[b'deltabasenode'] |
|
607 | basenode = filerevision[b'deltabasenode'] | |
611 | delta = extrafields[b'delta'] |
|
608 | delta = extrafields[b'delta'] | |
612 | elif b'revision' in extrafields: |
|
609 | elif b'revision' in extrafields: | |
613 | basenode = nullid |
|
610 | basenode = repo.nullid | |
614 | revision = extrafields[b'revision'] |
|
611 | revision = extrafields[b'revision'] | |
615 | delta = mdiff.trivialdiffheader(len(revision)) + revision |
|
612 | delta = mdiff.trivialdiffheader(len(revision)) + revision | |
616 | else: |
|
613 | else: | |
@@ -705,7 +702,7 b' def _fetchfilesfromcsets(' | |||||
705 | basenode = filerevision[b'deltabasenode'] |
|
702 | basenode = filerevision[b'deltabasenode'] | |
706 | delta = extrafields[b'delta'] |
|
703 | delta = extrafields[b'delta'] | |
707 | elif b'revision' in extrafields: |
|
704 | elif b'revision' in extrafields: | |
708 | basenode = nullid |
|
705 | basenode = repo.nullid | |
709 | revision = extrafields[b'revision'] |
|
706 | revision = extrafields[b'revision'] | |
710 | delta = mdiff.trivialdiffheader(len(revision)) + revision |
|
707 | delta = mdiff.trivialdiffheader(len(revision)) + revision | |
711 | else: |
|
708 | else: |
@@ -8,10 +8,7 b'' | |||||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | from .i18n import _ |
|
10 | from .i18n import _ | |
11 | from .node import ( |
|
11 | from .node import nullrev | |
12 | nullid, |
|
|||
13 | nullrev, |
|
|||
14 | ) |
|
|||
15 | from . import ( |
|
12 | from . import ( | |
16 | error, |
|
13 | error, | |
17 | revlog, |
|
14 | revlog, | |
@@ -42,7 +39,7 b' class filelog(object):' | |||||
42 | return self._revlog.__iter__() |
|
39 | return self._revlog.__iter__() | |
43 |
|
40 | |||
44 | def hasnode(self, node): |
|
41 | def hasnode(self, node): | |
45 | if node in (nullid, nullrev): |
|
42 | if node in (self.nullid, nullrev): | |
46 | return False |
|
43 | return False | |
47 |
|
44 | |||
48 | try: |
|
45 | try: |
@@ -15,7 +15,6 b' import shutil' | |||||
15 | from .i18n import _ |
|
15 | from .i18n import _ | |
16 | from .node import ( |
|
16 | from .node import ( | |
17 | hex, |
|
17 | hex, | |
18 | nullid, |
|
|||
19 | short, |
|
18 | short, | |
20 | ) |
|
19 | ) | |
21 | from .pycompat import ( |
|
20 | from .pycompat import ( | |
@@ -111,7 +110,7 b' class absentfilectx(object):' | |||||
111 | return None |
|
110 | return None | |
112 |
|
111 | |||
113 | def filenode(self): |
|
112 | def filenode(self): | |
114 | return nullid |
|
113 | return self._ctx.repo().nullid | |
115 |
|
114 | |||
116 | _customcmp = True |
|
115 | _customcmp = True | |
117 |
|
116 |
@@ -16,8 +16,7 b' import stat' | |||||
16 | from .i18n import _ |
|
16 | from .i18n import _ | |
17 | from .node import ( |
|
17 | from .node import ( | |
18 | hex, |
|
18 | hex, | |
19 | nullhex, |
|
19 | sha1nodeconstants, | |
20 | nullid, |
|
|||
21 | short, |
|
20 | short, | |
22 | ) |
|
21 | ) | |
23 | from .pycompat import getattr |
|
22 | from .pycompat import getattr | |
@@ -772,7 +771,7 b' def clone(' | |||||
772 | }, |
|
771 | }, | |
773 | ).result() |
|
772 | ).result() | |
774 |
|
773 | |||
775 | if rootnode != nullid: |
|
774 | if rootnode != sha1nodeconstants.nullid: | |
776 | sharepath = os.path.join(sharepool, hex(rootnode)) |
|
775 | sharepath = os.path.join(sharepool, hex(rootnode)) | |
777 | else: |
|
776 | else: | |
778 | ui.status( |
|
777 | ui.status( | |
@@ -883,7 +882,9 b' def clone(' | |||||
883 | # we need to re-init the repo after manually copying the data |
|
882 | # we need to re-init the repo after manually copying the data | |
884 | # into it |
|
883 | # into it | |
885 | destpeer = peer(srcrepo, peeropts, dest) |
|
884 | destpeer = peer(srcrepo, peeropts, dest) | |
886 | srcrepo.hook(b'outgoing', source=b'clone', node=nullhex) |
|
885 | srcrepo.hook( | |
|
886 | b'outgoing', source=b'clone', node=srcrepo.nodeconstants.nullhex | |||
|
887 | ) | |||
887 | else: |
|
888 | else: | |
888 | try: |
|
889 | try: | |
889 | # only pass ui when no srcrepo |
|
890 | # only pass ui when no srcrepo | |
@@ -1329,7 +1330,9 b' def incoming(ui, repo, source, opts, sub' | |||||
1329 | for n in chlist: |
|
1330 | for n in chlist: | |
1330 | if limit is not None and count >= limit: |
|
1331 | if limit is not None and count >= limit: | |
1331 | break |
|
1332 | break | |
1332 | parents = [p for p in other.changelog.parents(n) if p != nullid] |
|
1333 | parents = [ | |
|
1334 | p for p in other.changelog.parents(n) if p != repo.nullid | |||
|
1335 | ] | |||
1333 | if opts.get(b'no_merges') and len(parents) == 2: |
|
1336 | if opts.get(b'no_merges') and len(parents) == 2: | |
1334 | continue |
|
1337 | continue | |
1335 | count += 1 |
|
1338 | count += 1 | |
@@ -1406,7 +1409,7 b' def _outgoing_filter(repo, revs, opts):' | |||||
1406 | for n in revs: |
|
1409 | for n in revs: | |
1407 | if limit is not None and count >= limit: |
|
1410 | if limit is not None and count >= limit: | |
1408 | break |
|
1411 | break | |
1409 | parents = [p for p in cl.parents(n) if p != nullid] |
|
1412 | parents = [p for p in cl.parents(n) if p != repo.nullid] | |
1410 | if no_merges and len(parents) == 2: |
|
1413 | if no_merges and len(parents) == 2: | |
1411 | continue |
|
1414 | continue | |
1412 | count += 1 |
|
1415 | count += 1 |
@@ -14,7 +14,7 b' import os' | |||||
14 | import re |
|
14 | import re | |
15 |
|
15 | |||
16 | from ..i18n import _ |
|
16 | from ..i18n import _ | |
17 | from ..node import hex, nullid, short |
|
17 | from ..node import hex, short | |
18 | from ..pycompat import setattr |
|
18 | from ..pycompat import setattr | |
19 |
|
19 | |||
20 | from .common import ( |
|
20 | from .common import ( | |
@@ -220,7 +220,7 b' def _ctxsgen(context, ctxs):' | |||||
220 | def _siblings(siblings=None, hiderev=None): |
|
220 | def _siblings(siblings=None, hiderev=None): | |
221 | if siblings is None: |
|
221 | if siblings is None: | |
222 | siblings = [] |
|
222 | siblings = [] | |
223 | siblings = [s for s in siblings if s.node() != nullid] |
|
223 | siblings = [s for s in siblings if s.node() != s.repo().nullid] | |
224 | if len(siblings) == 1 and siblings[0].rev() == hiderev: |
|
224 | if len(siblings) == 1 and siblings[0].rev() == hiderev: | |
225 | siblings = [] |
|
225 | siblings = [] | |
226 | return templateutil.mappinggenerator(_ctxsgen, args=(siblings,)) |
|
226 | return templateutil.mappinggenerator(_ctxsgen, args=(siblings,)) | |
@@ -316,12 +316,16 b' def _nodenamesgen(context, f, node, name' | |||||
316 | yield {name: t} |
|
316 | yield {name: t} | |
317 |
|
317 | |||
318 |
|
318 | |||
319 | def showtag(repo, t1, node=nullid): |
|
319 | def showtag(repo, t1, node=None): | |
|
320 | if node is None: | |||
|
321 | node = repo.nullid | |||
320 | args = (repo.nodetags, node, b'tag') |
|
322 | args = (repo.nodetags, node, b'tag') | |
321 | return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1) |
|
323 | return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1) | |
322 |
|
324 | |||
323 |
|
325 | |||
324 | def showbookmark(repo, t1, node=nullid): |
|
326 | def showbookmark(repo, t1, node=None): | |
|
327 | if node is None: | |||
|
328 | node = repo.nullid | |||
325 | args = (repo.nodebookmarks, node, b'bookmark') |
|
329 | args = (repo.nodebookmarks, node, b'bookmark') | |
326 | return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1) |
|
330 | return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1) | |
327 |
|
331 |
@@ -2,8 +2,6 b' from __future__ import absolute_import, ' | |||||
2 |
|
2 | |||
3 | import contextlib |
|
3 | import contextlib | |
4 |
|
4 | |||
5 | from .. import node as nodemod |
|
|||
6 |
|
||||
7 | from . import util as interfaceutil |
|
5 | from . import util as interfaceutil | |
8 |
|
6 | |||
9 |
|
7 | |||
@@ -97,7 +95,7 b' class idirstate(interfaceutil.Interface)' | |||||
97 | def branch(): |
|
95 | def branch(): | |
98 | pass |
|
96 | pass | |
99 |
|
97 | |||
100 | def setparents(p1, p2=nullid): |
|
98 | def setparents(p1, p2=None): | |
101 | """Set dirstate parents to p1 and p2. |
|
99 | """Set dirstate parents to p1 and p2. | |
102 |
|
100 | |||
103 | When moving from two parents to one, 'm' merged entries a |
|
101 | When moving from two parents to one, 'm' merged entries a |
@@ -19,7 +19,6 b' from .i18n import _' | |||||
19 | from .node import ( |
|
19 | from .node import ( | |
20 | bin, |
|
20 | bin, | |
21 | hex, |
|
21 | hex, | |
22 | nullid, |
|
|||
23 | nullrev, |
|
22 | nullrev, | |
24 | sha1nodeconstants, |
|
23 | sha1nodeconstants, | |
25 | short, |
|
24 | short, | |
@@ -1702,7 +1701,7 b' class localrepository(object):' | |||||
1702 | _(b"warning: ignoring unknown working parent %s!\n") |
|
1701 | _(b"warning: ignoring unknown working parent %s!\n") | |
1703 | % short(node) |
|
1702 | % short(node) | |
1704 | ) |
|
1703 | ) | |
1705 | return nullid |
|
1704 | return self.nullid | |
1706 |
|
1705 | |||
1707 | @storecache(narrowspec.FILENAME) |
|
1706 | @storecache(narrowspec.FILENAME) | |
1708 | def narrowpats(self): |
|
1707 | def narrowpats(self): | |
@@ -1753,9 +1752,9 b' class localrepository(object):' | |||||
1753 | @unfilteredpropertycache |
|
1752 | @unfilteredpropertycache | |
1754 | def _quick_access_changeid_null(self): |
|
1753 | def _quick_access_changeid_null(self): | |
1755 | return { |
|
1754 | return { | |
1756 | b'null': (nullrev, nullid), |
|
1755 | b'null': (nullrev, self.nodeconstants.nullid), | |
1757 | nullrev: (nullrev, nullid), |
|
1756 | nullrev: (nullrev, self.nodeconstants.nullid), | |
1758 | nullid: (nullrev, nullid), |
|
1757 | self.nullid: (nullrev, self.nullid), | |
1759 | } |
|
1758 | } | |
1760 |
|
1759 | |||
1761 | @unfilteredpropertycache |
|
1760 | @unfilteredpropertycache | |
@@ -1765,7 +1764,7 b' class localrepository(object):' | |||||
1765 | quick = self._quick_access_changeid_null.copy() |
|
1764 | quick = self._quick_access_changeid_null.copy() | |
1766 | cl = self.unfiltered().changelog |
|
1765 | cl = self.unfiltered().changelog | |
1767 | for node in self.dirstate.parents(): |
|
1766 | for node in self.dirstate.parents(): | |
1768 | if node == nullid: |
|
1767 | if node == self.nullid: | |
1769 | continue |
|
1768 | continue | |
1770 | rev = cl.index.get_rev(node) |
|
1769 | rev = cl.index.get_rev(node) | |
1771 | if rev is None: |
|
1770 | if rev is None: | |
@@ -1785,7 +1784,7 b' class localrepository(object):' | |||||
1785 | quick[r] = pair |
|
1784 | quick[r] = pair | |
1786 | quick[n] = pair |
|
1785 | quick[n] = pair | |
1787 | p1node = self.dirstate.p1() |
|
1786 | p1node = self.dirstate.p1() | |
1788 | if p1node != nullid: |
|
1787 | if p1node != self.nullid: | |
1789 | quick[b'.'] = quick[p1node] |
|
1788 | quick[b'.'] = quick[p1node] | |
1790 | return quick |
|
1789 | return quick | |
1791 |
|
1790 | |||
@@ -2037,7 +2036,7 b' class localrepository(object):' | |||||
2037 | # local encoding. |
|
2036 | # local encoding. | |
2038 | tags = {} |
|
2037 | tags = {} | |
2039 | for (name, (node, hist)) in pycompat.iteritems(alltags): |
|
2038 | for (name, (node, hist)) in pycompat.iteritems(alltags): | |
2040 | if node != nullid: |
|
2039 | if node != self.nullid: | |
2041 | tags[encoding.tolocal(name)] = node |
|
2040 | tags[encoding.tolocal(name)] = node | |
2042 | tags[b'tip'] = self.changelog.tip() |
|
2041 | tags[b'tip'] = self.changelog.tip() | |
2043 | tagtypes = { |
|
2042 | tagtypes = { | |
@@ -2161,7 +2160,9 b' class localrepository(object):' | |||||
2161 | def wjoin(self, f, *insidef): |
|
2160 | def wjoin(self, f, *insidef): | |
2162 | return self.vfs.reljoin(self.root, f, *insidef) |
|
2161 | return self.vfs.reljoin(self.root, f, *insidef) | |
2163 |
|
2162 | |||
2164 | def setparents(self, p1, p2=nullid): |
|
2163 | def setparents(self, p1, p2=None): | |
|
2164 | if p2 is None: | |||
|
2165 | p2 = self.nullid | |||
2165 | self[None].setparents(p1, p2) |
|
2166 | self[None].setparents(p1, p2) | |
2166 | self._quick_access_changeid_invalidate() |
|
2167 | self._quick_access_changeid_invalidate() | |
2167 |
|
2168 | |||
@@ -3094,7 +3095,7 b' class localrepository(object):' | |||||
3094 | subrepoutil.writestate(self, newstate) |
|
3095 | subrepoutil.writestate(self, newstate) | |
3095 |
|
3096 | |||
3096 | p1, p2 = self.dirstate.parents() |
|
3097 | p1, p2 = self.dirstate.parents() | |
3097 | hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or b'') |
|
3098 | hookp1, hookp2 = hex(p1), (p2 != self.nullid and hex(p2) or b'') | |
3098 | try: |
|
3099 | try: | |
3099 | self.hook( |
|
3100 | self.hook( | |
3100 | b"precommit", throw=True, parent1=hookp1, parent2=hookp2 |
|
3101 | b"precommit", throw=True, parent1=hookp1, parent2=hookp2 | |
@@ -3267,7 +3268,7 b' class localrepository(object):' | |||||
3267 | t = n |
|
3268 | t = n | |
3268 | while True: |
|
3269 | while True: | |
3269 | p = self.changelog.parents(n) |
|
3270 | p = self.changelog.parents(n) | |
3270 | if p[1] != nullid or p[0] == nullid: |
|
3271 | if p[1] != self.nullid or p[0] == self.nullid: | |
3271 | b.append((t, n, p[0], p[1])) |
|
3272 | b.append((t, n, p[0], p[1])) | |
3272 | break |
|
3273 | break | |
3273 | n = p[0] |
|
3274 | n = p[0] | |
@@ -3280,7 +3281,7 b' class localrepository(object):' | |||||
3280 | n, l, i = top, [], 0 |
|
3281 | n, l, i = top, [], 0 | |
3281 | f = 1 |
|
3282 | f = 1 | |
3282 |
|
3283 | |||
3283 | while n != bottom and n != nullid: |
|
3284 | while n != bottom and n != self.nullid: | |
3284 | p = self.changelog.parents(n)[0] |
|
3285 | p = self.changelog.parents(n)[0] | |
3285 | if i == f: |
|
3286 | if i == f: | |
3286 | l.append(n) |
|
3287 | l.append(n) |
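The `_quick_access_changeid_null` hunk above keeps one `(rev, node)` pair reachable under every common spelling of the null changeset, `b'null'`, `nullrev`, and the node itself, so lookups skip the changelog. A simplified sketch of that table:

    # Sketch: pre-seed a lookup table so the usual identifiers for the
    # null changeset resolve without touching the changelog index.

    NULLREV = -1

    class Repo(object):
        def __init__(self, nullid):
            self.nullid = nullid

        def quick_access_null(self):
            pair = (NULLREV, self.nullid)
            return {b"null": pair, NULLREV: pair, self.nullid: pair}

    repo = Repo(b"\0" * 20)
    table = repo.quick_access_null()
    print(table[b"null"] == table[repo.nullid] == (NULLREV, repo.nullid))  # True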
@@ -12,12 +12,7 b' import os' | |||||
12 | import posixpath |
|
12 | import posixpath | |
13 |
|
13 | |||
14 | from .i18n import _ |
|
14 | from .i18n import _ | |
15 | from .node import ( |
|
15 | from .node import nullrev, wdirrev | |
16 | nullid, |
|
|||
17 | nullrev, |
|
|||
18 | wdirid, |
|
|||
19 | wdirrev, |
|
|||
20 | ) |
|
|||
21 |
|
16 | |||
22 | from .thirdparty import attr |
|
17 | from .thirdparty import attr | |
23 |
|
18 | |||
@@ -357,7 +352,7 b' class changesetprinter(object):' | |||||
357 | if self.ui.debugflag: |
|
352 | if self.ui.debugflag: | |
358 | mnode = ctx.manifestnode() |
|
353 | mnode = ctx.manifestnode() | |
359 | if mnode is None: |
|
354 | if mnode is None: | |
360 | mnode = wdirid |
|
355 | mnode = self.repo.nodeconstants.wdirid | |
361 | mrev = wdirrev |
|
356 | mrev = wdirrev | |
362 | else: |
|
357 | else: | |
363 | mrev = self.repo.manifestlog.rev(mnode) |
|
358 | mrev = self.repo.manifestlog.rev(mnode) | |
@@ -505,7 +500,11 b' class changesetformatter(changesetprinte' | |||||
505 | ) |
|
500 | ) | |
506 |
|
501 | |||
507 | if self.ui.debugflag or b'manifest' in datahint: |
|
502 | if self.ui.debugflag or b'manifest' in datahint: | |
508 | fm.data(manifest=fm.hexfunc(ctx.manifestnode() or wdirid)) |
|
503 | fm.data( | |
|
504 | manifest=fm.hexfunc( | |||
|
505 | ctx.manifestnode() or self.repo.nodeconstants.wdirid | |||
|
506 | ) | |||
|
507 | ) | |||
509 | if self.ui.debugflag or b'extra' in datahint: |
|
508 | if self.ui.debugflag or b'extra' in datahint: | |
510 | fm.data(extra=fm.formatdict(ctx.extra())) |
|
509 | fm.data(extra=fm.formatdict(ctx.extra())) | |
511 |
|
510 | |||
@@ -991,7 +990,7 b' def _initialrevs(repo, wopts):' | |||||
991 | """Return the initial set of revisions to be filtered or followed""" |
|
990 | """Return the initial set of revisions to be filtered or followed""" | |
992 | if wopts.revspec: |
|
991 | if wopts.revspec: | |
993 | revs = scmutil.revrange(repo, wopts.revspec) |
|
992 | revs = scmutil.revrange(repo, wopts.revspec) | |
994 | elif wopts.follow and repo.dirstate.p1() == nullid: |
|
993 | elif wopts.follow and repo.dirstate.p1() == repo.nullid: | |
995 | revs = smartset.baseset() |
|
994 | revs = smartset.baseset() | |
996 | elif wopts.follow: |
|
995 | elif wopts.follow: | |
997 | revs = repo.revs(b'.') |
|
996 | revs = repo.revs(b'.') |
@@ -16,7 +16,6 b' from .i18n import _' | |||||
16 | from .node import ( |
|
16 | from .node import ( | |
17 | bin, |
|
17 | bin, | |
18 | hex, |
|
18 | hex, | |
19 | nullid, |
|
|||
20 | nullrev, |
|
19 | nullrev, | |
21 | ) |
|
20 | ) | |
22 | from .pycompat import getattr |
|
21 | from .pycompat import getattr | |
@@ -795,7 +794,10 b' class treemanifest(object):' | |||||
795 | def __init__(self, nodeconstants, dir=b'', text=b''): |
|
794 | def __init__(self, nodeconstants, dir=b'', text=b''): | |
796 | self._dir = dir |
|
795 | self._dir = dir | |
797 | self.nodeconstants = nodeconstants |
|
796 | self.nodeconstants = nodeconstants | |
798 | self._node = nullid |
|
797 | from .node import sha1nodeconstants | |
|
798 | ||||
|
799 | assert sha1nodeconstants == nodeconstants | |||
|
800 | self._node = self.nodeconstants.nullid | |||
799 | self._loadfunc = _noop |
|
801 | self._loadfunc = _noop | |
800 | self._copyfunc = _noop |
|
802 | self._copyfunc = _noop | |
801 | self._dirty = False |
|
803 | self._dirty = False | |
@@ -1391,7 +1393,7 b' class treemanifest(object):' | |||||
1391 | continue |
|
1393 | continue | |
1392 | subp1 = getnode(m1, d) |
|
1394 | subp1 = getnode(m1, d) | |
1393 | subp2 = getnode(m2, d) |
|
1395 | subp2 = getnode(m2, d) | |
1394 | if subp1 == nullid: |
|
1396 | if subp1 == self.nodeconstants.nullid: | |
1395 | subp1, subp2 = subp2, subp1 |
|
1397 | subp1, subp2 = subp2, subp1 | |
1396 | writesubtree(subm, subp1, subp2, match) |
|
1398 | writesubtree(subm, subp1, subp2, match) | |
1397 |
|
1399 | |||
@@ -1574,6 +1576,12 b' class manifestrevlog(object):' | |||||
1574 | value is passed in to the constructor. |
|
1576 | value is passed in to the constructor. | |
1575 | """ |
|
1577 | """ | |
1576 | self.nodeconstants = nodeconstants |
|
1578 | self.nodeconstants = nodeconstants | |
|
1579 | from .node import sha1nodeconstants | |||
|
1580 | ||||
|
1581 | assert sha1nodeconstants == nodeconstants, ( | |||
|
1582 | sha1nodeconstants, | |||
|
1583 | nodeconstants, | |||
|
1584 | ) | |||
1577 | # During normal operations, we expect to deal with not more than four |
|
1585 | # During normal operations, we expect to deal with not more than four | |
1578 | # revs at a time (such as during commit --amend). When rebasing large |
|
1586 | # revs at a time (such as during commit --amend). When rebasing large | |
1579 | # stacks of commits, the number can go up, hence the config knob below. |
|
1587 | # stacks of commits, the number can go up, hence the config knob below. | |
@@ -1929,6 +1937,9 b' class manifestlog(object):' | |||||
1929 |
|
1937 | |||
1930 | def __init__(self, opener, repo, rootstore, narrowmatch): |
|
1938 | def __init__(self, opener, repo, rootstore, narrowmatch): | |
1931 | self.nodeconstants = repo.nodeconstants |
|
1939 | self.nodeconstants = repo.nodeconstants | |
|
1940 | from .node import sha1nodeconstants | |||
|
1941 | ||||
|
1942 | assert sha1nodeconstants == repo.nodeconstants | |||
1932 | usetreemanifest = False |
|
1943 | usetreemanifest = False | |
1933 | cachesize = 4 |
|
1944 | cachesize = 4 | |
1934 |
|
1945 | |||
@@ -1994,7 +2005,7 b' class manifestlog(object):' | |||||
1994 | else: |
|
2005 | else: | |
1995 | m = manifestctx(self, node) |
|
2006 | m = manifestctx(self, node) | |
1996 |
|
2007 | |||
1997 | if node != nullid: |
|
2008 | if node != self.nodeconstants.nullid: | |
1998 | mancache = self._dirmancache.get(tree) |
|
2009 | mancache = self._dirmancache.get(tree) | |
1999 | if not mancache: |
|
2010 | if not mancache: | |
2000 | mancache = util.lrucachedict(self._cachesize) |
|
2011 | mancache = util.lrucachedict(self._cachesize) | |
@@ -2082,7 +2093,7 b' class manifestctx(object):' | |||||
2082 |
|
2093 | |||
2083 | def read(self): |
|
2094 | def read(self): | |
2084 | if self._data is None: |
|
2095 | if self._data is None: | |
2085 | if self._node == nullid: |
|
2096 | if self._node == self._manifestlog.nodeconstants.nullid: | |
2086 | self._data = manifestdict() |
|
2097 | self._data = manifestdict() | |
2087 | else: |
|
2098 | else: | |
2088 | store = self._storage() |
|
2099 | store = self._storage() | |
@@ -2188,7 +2199,7 b' class treemanifestctx(object):' | |||||
2188 | def read(self): |
|
2199 | def read(self): | |
2189 | if self._data is None: |
|
2200 | if self._data is None: | |
2190 | store = self._storage() |
|
2201 | store = self._storage() | |
2191 | if self._node == nullid: |
|
2202 | if self._node == self._manifestlog.nodeconstants.nullid: | |
2192 | self._data = treemanifest(self._manifestlog.nodeconstants) |
|
2203 | self._data = treemanifest(self._manifestlog.nodeconstants) | |
2193 | # TODO accessing non-public API |
|
2204 | # TODO accessing non-public API | |
2194 | elif store._treeondisk: |
|
2205 | elif store._treeondisk: | |
@@ -2296,6 +2307,9 b' class excludeddir(treemanifest):' | |||||
2296 |
|
2307 | |||
2297 | def __init__(self, nodeconstants, dir, node): |
|
2308 | def __init__(self, nodeconstants, dir, node): | |
2298 | super(excludeddir, self).__init__(nodeconstants, dir) |
|
2309 | super(excludeddir, self).__init__(nodeconstants, dir) | |
|
2310 | from .node import sha1nodeconstants | |||
|
2311 | ||||
|
2312 | assert sha1nodeconstants == nodeconstants | |||
2299 | self._node = node |
|
2313 | self._node = node | |
2300 | # Add an empty file, which will be included by iterators and such, |
|
2314 | # Add an empty file, which will be included by iterators and such, | |
2301 | # appearing as the directory itself (i.e. something like "dir/") |
|
2315 | # appearing as the directory itself (i.e. something like "dir/") | |
@@ -2316,6 +2330,9 b' class excludeddirmanifestctx(treemanifes' | |||||
2316 |
|
2330 | |||
2317 | def __init__(self, nodeconstants, dir, node): |
|
2331 | def __init__(self, nodeconstants, dir, node): | |
2318 | self.nodeconstants = nodeconstants |
|
2332 | self.nodeconstants = nodeconstants | |
|
2333 | from .node import sha1nodeconstants | |||
|
2334 | ||||
|
2335 | assert sha1nodeconstants == nodeconstants | |||
2319 | self._dir = dir |
|
2336 | self._dir = dir | |
2320 | self._node = node |
|
2337 | self._node = node | |
2321 |
|
2338 | |||
@@ -2344,6 +2361,9 b' class excludedmanifestrevlog(manifestrev' | |||||
2344 |
|
2361 | |||
2345 | def __init__(self, nodeconstants, dir): |
|
2362 | def __init__(self, nodeconstants, dir): | |
2346 | self.nodeconstants = nodeconstants |
|
2363 | self.nodeconstants = nodeconstants | |
|
2364 | from .node import sha1nodeconstants | |||
|
2365 | ||||
|
2366 | assert sha1nodeconstants == nodeconstants | |||
2347 | self._dir = dir |
|
2367 | self._dir = dir | |
2348 |
|
2368 | |||
2349 | def __len__(self): |
|
2369 | def __len__(self): |
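The manifest hunks thread the repo's `nodeconstants` object into every tree and context class instead of importing a global null id (with temporary `sha1nodeconstants == nodeconstants` asserts guarding the transition). A condensed sketch of the receiving side, with illustrative class names:

    # Sketch: manifest-level classes take nodeconstants at construction
    # and read sentinels off it rather than off the node module.

    class Sha1NodeConstants(object):
        nodelen = 20
        nullid = b"\0" * nodelen

    class TreeManifest(object):
        def __init__(self, nodeconstants, dir=b""):
            self.nodeconstants = nodeconstants
            self._dir = dir
            self._node = nodeconstants.nullid  # unwritten tree starts at null

        def written(self):
            return self._node != self.nodeconstants.nullid

    tm = TreeManifest(Sha1NodeConstants())
    print(tm.written())  # False until a real node is assigned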
@@ -13,12 +13,7 b' import stat' | |||||
13 | import struct |
|
13 | import struct | |
14 |
|
14 | |||
15 | from .i18n import _ |
|
15 | from .i18n import _ | |
16 | from .node import ( |
|
16 | from .node import nullrev | |
17 | addednodeid, |
|
|||
18 | modifiednodeid, |
|
|||
19 | nullid, |
|
|||
20 | nullrev, |
|
|||
21 | ) |
|
|||
22 | from .thirdparty import attr |
|
17 | from .thirdparty import attr | |
23 | from .utils import stringutil |
|
18 | from .utils import stringutil | |
24 | from . import ( |
|
19 | from . import ( | |
@@ -779,7 +774,7 b' def manifestmerge(' | |||||
779 | # to flag the change. If wctx is a committed revision, we shouldn't |
|
774 | # to flag the change. If wctx is a committed revision, we shouldn't | |
780 | # care for the dirty state of the working directory. |
|
775 | # care for the dirty state of the working directory. | |
781 | if any(wctx.sub(s).dirty() for s in wctx.substate): |
|
776 | if any(wctx.sub(s).dirty() for s in wctx.substate): | |
782 | m1[b'.hgsubstate'] = modifiednodeid |
|
777 | m1[b'.hgsubstate'] = repo.nodeconstants.modifiednodeid | |
783 |
|
778 | |||
784 | # Don't use m2-vs-ma optimization if: |
|
779 | # Don't use m2-vs-ma optimization if: | |
785 | # - ma is the same as m1 or m2, which we're just going to diff again later |
|
780 | # - ma is the same as m1 or m2, which we're just going to diff again later | |
@@ -944,7 +939,7 b' def manifestmerge(' | |||||
944 | mresult.addcommitinfo( |
|
939 | mresult.addcommitinfo( | |
945 | f, b'merge-removal-candidate', b'yes' |
|
940 | f, b'merge-removal-candidate', b'yes' | |
946 | ) |
|
941 | ) | |
947 | elif n1 == addednodeid: |
|
942 | elif n1 == repo.nodeconstants.addednodeid: | |
948 | # This file was locally added. We should forget it instead of |
|
943 | # This file was locally added. We should forget it instead of | |
949 | # deleting it. |
|
944 | # deleting it. | |
950 | mresult.addfile( |
|
945 | mresult.addfile( | |
@@ -1785,7 +1780,7 b' def _advertisefsmonitor(repo, num_gets, ' | |||||
1785 | if ( |
|
1780 | if ( | |
1786 | fsmonitorwarning |
|
1781 | fsmonitorwarning | |
1787 | and not fsmonitorenabled |
|
1782 | and not fsmonitorenabled | |
1788 | and p1node == nullid |
|
1783 | and p1node == repo.nullid | |
1789 | and num_gets >= fsmonitorthreshold |
|
1784 | and num_gets >= fsmonitorthreshold | |
1790 | and pycompat.sysplatform.startswith((b'linux', b'darwin')) |
|
1785 | and pycompat.sysplatform.startswith((b'linux', b'darwin')) | |
1791 | ): |
|
1786 | ): | |
@@ -1913,7 +1908,7 b' def _update(' | |||||
1913 | else: |
|
1908 | else: | |
1914 | if repo.ui.configlist(b'merge', b'preferancestor') == [b'*']: |
|
1909 | if repo.ui.configlist(b'merge', b'preferancestor') == [b'*']: | |
1915 | cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node()) |
|
1910 | cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node()) | |
1916 | pas = [repo[anc] for anc in (sorted(cahs) or [nullid])] |
|
1911 | pas = [repo[anc] for anc in (sorted(cahs) or [repo.nullid])] | |
1917 | else: |
|
1912 | else: | |
1918 | pas = [p1.ancestor(p2, warn=branchmerge)] |
|
1913 | pas = [p1.ancestor(p2, warn=branchmerge)] | |
1919 |
|
1914 | |||
@@ -2112,7 +2107,7 b' def _update(' | |||||
2112 |
|
2107 | |||
2113 | ### apply phase |
|
2108 | ### apply phase | |
2114 | if not branchmerge: # just jump to the new rev |
|
2109 | if not branchmerge: # just jump to the new rev | |
2115 | fp1, fp2, xp1, xp2 = fp2, nullid, xp2, b'' |
|
2110 | fp1, fp2, xp1, xp2 = fp2, repo.nullid, xp2, b'' | |
2116 | # If we're doing a partial update, we need to skip updating |
|
2111 | # If we're doing a partial update, we need to skip updating | |
2117 | # the dirstate. |
|
2112 | # the dirstate. | |
2118 | always = matcher is None or matcher.always() |
|
2113 | always = matcher is None or matcher.always() | |
@@ -2281,14 +2276,14 b' def graft(' | |||||
2281 | if keepconflictparent and stats.unresolvedcount: |
|
2276 | if keepconflictparent and stats.unresolvedcount: | |
2282 | pother = ctx.node() |
|
2277 | pother = ctx.node() | |
2283 | else: |
|
2278 | else: | |
2284 | pother = nullid |
|
2279 | pother = repo.nullid | |
2285 | parents = ctx.parents() |
|
2280 | parents = ctx.parents() | |
2286 | if keepparent and len(parents) == 2 and base in parents: |
|
2281 | if keepparent and len(parents) == 2 and base in parents: | |
2287 | parents.remove(base) |
|
2282 | parents.remove(base) | |
2288 | pother = parents[0].node() |
|
2283 | pother = parents[0].node() | |
2289 | # Never set both parents equal to each other |
|
2284 | # Never set both parents equal to each other | |
2290 | if pother == pctx.node(): |
|
2285 | if pother == pctx.node(): | |
2291 | pother = nullid |
|
2286 | pother = repo.nullid | |
2292 |
|
2287 | |||
2293 | if wctx.isinmemory(): |
|
2288 | if wctx.isinmemory(): | |
2294 | wctx.setparents(pctx.node(), pother) |
|
2289 | wctx.setparents(pctx.node(), pother) |
@@ -9,7 +9,6 b' from .i18n import _' | |||||
9 | from .node import ( |
|
9 | from .node import ( | |
10 | bin, |
|
10 | bin, | |
11 | hex, |
|
11 | hex, | |
12 | nullhex, |
|
|||
13 | nullrev, |
|
12 | nullrev, | |
14 | ) |
|
13 | ) | |
15 | from . import ( |
|
14 | from . import ( | |
@@ -32,7 +31,7 b' def _droponode(data):' | |||||
32 |
|
31 | |||
33 |
|
32 | |||
34 | def _filectxorabsent(hexnode, ctx, f): |
|
33 | def _filectxorabsent(hexnode, ctx, f): | |
35 | if hexnode == nullhex: |
|
34 | if hexnode == ctx.repo().nodeconstants.nullhex: | |
36 | return filemerge.absentfilectx(ctx, f) |
|
35 | return filemerge.absentfilectx(ctx, f) | |
37 | else: |
|
36 | else: | |
38 | return ctx[f] |
|
37 | return ctx[f] | |
@@ -248,7 +247,7 b' class _mergestate_base(object):' | |||||
248 | note: also write the local version to the `.hg/merge` directory. |
|
247 | note: also write the local version to the `.hg/merge` directory. | |
249 | """ |
|
248 | """ | |
250 | if fcl.isabsent(): |
|
249 | if fcl.isabsent(): | |
251 | localkey = nullhex |
|
250 | localkey = self._repo.nodeconstants.nullhex | |
252 | else: |
|
251 | else: | |
253 | localkey = mergestate.getlocalkey(fcl.path()) |
|
252 | localkey = mergestate.getlocalkey(fcl.path()) | |
254 | self._make_backup(fcl, localkey) |
|
253 | self._make_backup(fcl, localkey) | |
@@ -354,7 +353,7 b' class _mergestate_base(object):' | |||||
354 | flags = flo |
|
353 | flags = flo | |
355 | if preresolve: |
|
354 | if preresolve: | |
356 | # restore local |
|
355 | # restore local | |
357 | if localkey != nullhex: |
|
356 | if localkey != self._repo.nodeconstants.nullhex: | |
358 | self._restore_backup(wctx[dfile], localkey, flags) |
|
357 | self._restore_backup(wctx[dfile], localkey, flags) | |
359 | else: |
|
358 | else: | |
360 | wctx[dfile].remove(ignoremissing=True) |
|
359 | wctx[dfile].remove(ignoremissing=True) | |
@@ -658,7 +657,10 b' class mergestate(_mergestate_base):' | |||||
658 | records.append( |
|
657 | records.append( | |
659 | (RECORD_PATH_CONFLICT, b'\0'.join([filename] + v)) |
|
658 | (RECORD_PATH_CONFLICT, b'\0'.join([filename] + v)) | |
660 | ) |
|
659 | ) | |
661 | elif v[1] == nullhex or v[6] == nullhex: |
|
660 | elif ( | |
|
661 | v[1] == self._repo.nodeconstants.nullhex | |||
|
662 | or v[6] == self._repo.nodeconstants.nullhex | |||
|
663 | ): | |||
662 | # Change/Delete or Delete/Change conflicts. These are stored in |
|
664 | # Change/Delete or Delete/Change conflicts. These are stored in | |
663 | # 'C' records. v[1] is the local file, and is nullhex when the |
|
665 | # 'C' records. v[1] is the local file, and is nullhex when the | |
664 | # file is deleted locally ('dc'). v[6] is the remote file, and |
|
666 | # file is deleted locally ('dc'). v[6] is the remote file, and |
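
The change/delete test above reduces to a small predicate; a hedged sketch (record layout as described in the comments above, helper name hypothetical):

    def is_change_delete(repo, v):
        # v[1] is the local file node and v[6] the remote one; either being
        # the all-zero hex id marks a change/delete or delete/change conflict.
        nullhex = repo.nodeconstants.nullhex
        return v[1] == nullhex or v[6] == nullhex
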
@@ -11,10 +11,7 b' from __future__ import absolute_import, ' | |||||
11 | import multiprocessing |
|
11 | import multiprocessing | |
12 | import struct |
|
12 | import struct | |
13 |
|
13 | |||
14 | from .node import ( |
|
14 | from .node import nullrev | |
15 | nullid, |
|
|||
16 | nullrev, |
|
|||
17 | ) |
|
|||
18 | from . import ( |
|
15 | from . import ( | |
19 | error, |
|
16 | error, | |
20 | pycompat, |
|
17 | pycompat, | |
@@ -617,7 +614,7 b' def computechangesetfilesmerged(ctx):' | |||||
617 | if f in ctx: |
|
614 | if f in ctx: | |
618 | fctx = ctx[f] |
|
615 | fctx = ctx[f] | |
619 | parents = fctx._filelog.parents(fctx._filenode) |
|
616 | parents = fctx._filelog.parents(fctx._filenode) | |
620 | if parents[1] != nullid: |
|
617 | if parents[1] != ctx.repo().nullid: | |
621 | merged.append(f) |
|
618 | merged.append(f) | |
622 | return merged |
|
619 | return merged | |
623 |
|
620 |
@@ -58,11 +58,11 b' class sha1nodeconstants(object):' | |||||
58 |
|
58 | |||
59 |
|
59 | |||
60 | # legacy starting point for porting modules |
|
60 | # legacy starting point for porting modules | |
61 | nullid = sha1nodeconstants.nullid |
|
61 | # nullid = sha1nodeconstants.nullid | |
62 | nullhex = sha1nodeconstants.nullhex |
|
62 | # nullhex = sha1nodeconstants.nullhex | |
63 | newnodeid = sha1nodeconstants.newnodeid |
|
63 | # newnodeid = sha1nodeconstants.newnodeid | |
64 | addednodeid = sha1nodeconstants.addednodeid |
|
64 | # addednodeid = sha1nodeconstants.addednodeid | |
65 | modifiednodeid = sha1nodeconstants.modifiednodeid |
|
65 | # modifiednodeid = sha1nodeconstants.modifiednodeid | |
66 | wdirfilenodeids = sha1nodeconstants.wdirfilenodeids |
|
66 | # wdirfilenodeids = sha1nodeconstants.wdirfilenodeids | |
67 | wdirid = sha1nodeconstants.wdirid |
|
67 | # wdirid = sha1nodeconstants.wdirid | |
68 | wdirhex = sha1nodeconstants.wdirhex |
|
68 | # wdirhex = sha1nodeconstants.wdirhex |
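
This hunk is the pivot of the series: the legacy module-level aliases are commented out, and every consumer below reaches the constants through sha1nodeconstants or through a repo or revlog attribute instead. A minimal sketch of the new access pattern (is_null is a hypothetical helper; the assertions state known SHA-1 values):

    from mercurial.node import sha1nodeconstants

    # The null node is twenty zero bytes; nullhex is its 40-char hex form.
    assert sha1nodeconstants.nullid == b'\0' * 20
    assert sha1nodeconstants.nullhex == b'0' * 40

    def is_null(repo, node):
        # repo.nullid is the per-repo shorthand the hunks below rely on,
        # equivalent to repo.nodeconstants.nullid for SHA-1 repositories.
        return node == repo.nullid
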
@@ -73,11 +73,14 b' import errno' | |||||
73 | import struct |
|
73 | import struct | |
74 |
|
74 | |||
75 | from .i18n import _ |
|
75 | from .i18n import _ | |
|
76 | from .node import ( | |||
|
77 | bin, | |||
|
78 | hex, | |||
|
79 | ) | |||
76 | from .pycompat import getattr |
|
80 | from .pycompat import getattr | |
77 | from .node import ( |
|
81 | from .node import ( | |
78 | bin, |
|
82 | bin, | |
79 | hex, |
|
83 | hex, | |
80 | nullid, |
|
|||
81 | ) |
|
84 | ) | |
82 | from . import ( |
|
85 | from . import ( | |
83 | encoding, |
|
86 | encoding, | |
@@ -526,14 +529,14 b' def _addchildren(children, markers):' | |||||
526 | children.setdefault(p, set()).add(mark) |
|
529 | children.setdefault(p, set()).add(mark) | |
527 |
|
530 | |||
528 |
|
531 | |||
529 | def _checkinvalidmarkers(markers): |
|
532 | def _checkinvalidmarkers(repo, markers): | |
530 | """search for marker with invalid data and raise error if needed |
|
533 | """search for marker with invalid data and raise error if needed | |
531 |
|
534 | |||
532 | Exist as a separated function to allow the evolve extension for a more |
|
535 | Exist as a separated function to allow the evolve extension for a more | |
533 | subtle handling. |
|
536 | subtle handling. | |
534 | """ |
|
537 | """ | |
535 | for mark in markers: |
|
538 | for mark in markers: | |
536 | if nullid in mark[1]: |
|
539 | if repo.nullid in mark[1]: | |
537 | raise error.Abort( |
|
540 | raise error.Abort( | |
538 | _( |
|
541 | _( | |
539 | b'bad obsolescence marker detected: ' |
|
542 | b'bad obsolescence marker detected: ' | |
@@ -727,7 +730,7 b' class obsstore(object):' | |||||
727 | return [] |
|
730 | return [] | |
728 | self._version, markers = _readmarkers(data) |
|
731 | self._version, markers = _readmarkers(data) | |
729 | markers = list(markers) |
|
732 | markers = list(markers) | |
730 | _checkinvalidmarkers(markers) |
|
733 | _checkinvalidmarkers(self.repo, markers) | |
731 | return markers |
|
734 | return markers | |
732 |
|
735 | |||
733 | @propertycache |
|
736 | @propertycache | |
@@ -761,7 +764,7 b' class obsstore(object):' | |||||
761 | _addpredecessors(self.predecessors, markers) |
|
764 | _addpredecessors(self.predecessors, markers) | |
762 | if self._cached('children'): |
|
765 | if self._cached('children'): | |
763 | _addchildren(self.children, markers) |
|
766 | _addchildren(self.children, markers) | |
764 | _checkinvalidmarkers(markers) |
|
767 | _checkinvalidmarkers(self.repo, markers) | |
765 |
|
768 | |||
766 | def relevantmarkers(self, nodes): |
|
769 | def relevantmarkers(self, nodes): | |
767 | """return a set of all obsolescence markers relevant to a set of nodes. |
|
770 | """return a set of all obsolescence markers relevant to a set of nodes. |
@@ -20,7 +20,7 b' import zlib' | |||||
20 | from .i18n import _ |
|
20 | from .i18n import _ | |
21 | from .node import ( |
|
21 | from .node import ( | |
22 | hex, |
|
22 | hex, | |
23 | nullhex, |
|
23 | sha1nodeconstants, | |
24 | short, |
|
24 | short, | |
25 | ) |
|
25 | ) | |
26 | from .pycompat import open |
|
26 | from .pycompat import open | |
@@ -3100,8 +3100,8 b' def diffcontent(data1, data2, header, bi' | |||||
3100 |
|
3100 | |||
3101 | ctx1, fctx1, path1, flag1, content1, date1 = data1 |
|
3101 | ctx1, fctx1, path1, flag1, content1, date1 = data1 | |
3102 | ctx2, fctx2, path2, flag2, content2, date2 = data2 |
|
3102 | ctx2, fctx2, path2, flag2, content2, date2 = data2 | |
3103 | index1 = _gitindex(content1) if path1 in ctx1 else nullhex |
|
3103 | index1 = _gitindex(content1) if path1 in ctx1 else sha1nodeconstants.nullhex | |
3104 | index2 = _gitindex(content2) if path2 in ctx2 else nullhex |
|
3104 | index2 = _gitindex(content2) if path2 in ctx2 else sha1nodeconstants.nullhex | |
3105 | if binary and opts.git and not opts.nobinary: |
|
3105 | if binary and opts.git and not opts.nobinary: | |
3106 | text = mdiff.b85diff(content1, content2) |
|
3106 | text = mdiff.b85diff(content1, content2) | |
3107 | if text: |
|
3107 | if text: |
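
For context, git-style diffs label a side with no file using the all-zero id in their index header; a hedged sketch of the shape the two lines above feed (gitindex stands in for the real _gitindex helper):

    from mercurial.node import sha1nodeconstants

    def index_header(ctx1, ctx2, path1, path2, content1, content2, gitindex):
        # An absent file hashes to nullhex, matching the diffcontent hunk.
        h1 = gitindex(content1) if path1 in ctx1 else sha1nodeconstants.nullhex
        h2 = gitindex(content2) if path2 in ctx2 else sha1nodeconstants.nullhex
        return b'index %s..%s' % (h1, h2)
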
@@ -109,7 +109,6 b' from .i18n import _' | |||||
109 | from .node import ( |
|
109 | from .node import ( | |
110 | bin, |
|
110 | bin, | |
111 | hex, |
|
111 | hex, | |
112 | nullid, |
|
|||
113 | nullrev, |
|
112 | nullrev, | |
114 | short, |
|
113 | short, | |
115 | wdirrev, |
|
114 | wdirrev, | |
@@ -862,7 +861,7 b' def analyzeremotephases(repo, subset, ro' | |||||
862 | node = bin(nhex) |
|
861 | node = bin(nhex) | |
863 | phase = int(phase) |
|
862 | phase = int(phase) | |
864 | if phase == public: |
|
863 | if phase == public: | |
865 | if node != nullid: |
|
864 | if node != repo.nullid: | |
866 | repo.ui.warn( |
|
865 | repo.ui.warn( | |
867 | _( |
|
866 | _( | |
868 | b'ignoring inconsistent public root' |
|
867 | b'ignoring inconsistent public root' | |
@@ -919,10 +918,10 b' def newheads(repo, heads, roots):' | |||||
919 | rev = cl.index.get_rev |
|
918 | rev = cl.index.get_rev | |
920 | if not roots: |
|
919 | if not roots: | |
921 | return heads |
|
920 | return heads | |
922 | if not heads or heads == [nullid]: |
|
921 | if not heads or heads == [repo.nullid]: | |
923 | return [] |
|
922 | return [] | |
924 | # The logic operated on revisions, convert arguments early for convenience |
|
923 | # The logic operated on revisions, convert arguments early for convenience | |
925 | new_heads = {rev(n) for n in heads if n != nullid} |
|
924 | new_heads = {rev(n) for n in heads if n != repo.nullid} | |
926 | roots = [rev(n) for n in roots] |
|
925 | roots = [rev(n) for n in roots] | |
927 | # compute the area we need to remove |
|
926 | # compute the area we need to remove | |
928 | affected_zone = repo.revs(b"(%ld::%ld)", roots, new_heads) |
|
927 | affected_zone = repo.revs(b"(%ld::%ld)", roots, new_heads) |
@@ -10,7 +10,10 b' from __future__ import absolute_import' | |||||
10 | import struct |
|
10 | import struct | |
11 | import zlib |
|
11 | import zlib | |
12 |
|
12 | |||
13 | from ..node import nullid, nullrev |
|
13 | from ..node import ( | |
|
14 | nullrev, | |||
|
15 | sha1nodeconstants, | |||
|
16 | ) | |||
14 | from .. import ( |
|
17 | from .. import ( | |
15 | pycompat, |
|
18 | pycompat, | |
16 | util, |
|
19 | util, | |
@@ -50,7 +53,7 b' class BaseIndexObject(object):' | |||||
50 | # Size of a C long int, platform independent |
|
53 | # Size of a C long int, platform independent | |
51 | int_size = struct.calcsize(b'>i') |
|
54 | int_size = struct.calcsize(b'>i') | |
52 | # An empty index entry, used as a default value to be overridden, or nullrev |
|
55 | # An empty index entry, used as a default value to be overridden, or nullrev | |
53 | null_item = (0, 0, 0, -1, -1, -1, -1, nullid) |
|
56 | null_item = (0, 0, 0, -1, -1, -1, -1, sha1nodeconstants.nullid) | |
54 |
|
57 | |||
55 | @util.propertycache |
|
58 | @util.propertycache | |
56 | def entry_size(self): |
|
59 | def entry_size(self): | |
@@ -64,7 +67,7 b' class BaseIndexObject(object):' | |||||
64 |
|
67 | |||
65 | @util.propertycache |
|
68 | @util.propertycache | |
66 | def _nodemap(self): |
|
69 | def _nodemap(self): | |
67 | nodemap = nodemaputil.NodeMap({nullid: nullrev}) |
|
70 | nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev}) | |
68 | for r in range(0, len(self)): |
|
71 | for r in range(0, len(self)): | |
69 | n = self[r][7] |
|
72 | n = self[r][7] | |
70 | nodemap[n] = r |
|
73 | nodemap[n] = r | |
@@ -246,7 +249,7 b' def parse_index2(data, inline, revlogv2=' | |||||
246 |
|
249 | |||
247 | class Index2Mixin(object): |
|
250 | class Index2Mixin(object): | |
248 | index_format = revlog_constants.INDEX_ENTRY_V2 |
|
251 | index_format = revlog_constants.INDEX_ENTRY_V2 | |
249 | null_item = (0, 0, 0, -1, -1, -1, -1, nullid, 0, 0) |
|
252 | null_item = (0, 0, 0, -1, -1, -1, -1, sha1nodeconstants.nullid, 0, 0) | |
250 |
|
253 | |||
251 | def replace_sidedata_info(self, i, sidedata_offset, sidedata_length): |
|
254 | def replace_sidedata_info(self, i, sidedata_offset, sidedata_length): | |
252 | """ |
|
255 | """ |
@@ -26,14 +26,9 b' import zlib' | |||||
26 | from .node import ( |
|
26 | from .node import ( | |
27 | bin, |
|
27 | bin, | |
28 | hex, |
|
28 | hex, | |
29 | nullhex, |
|
|||
30 | nullid, |
|
|||
31 | nullrev, |
|
29 | nullrev, | |
32 | sha1nodeconstants, |
|
30 | sha1nodeconstants, | |
33 | short, |
|
31 | short, | |
34 | wdirfilenodeids, |
|
|||
35 | wdirhex, |
|
|||
36 | wdirid, |
|
|||
37 | wdirrev, |
|
32 | wdirrev, | |
38 | ) |
|
33 | ) | |
39 | from .i18n import _ |
|
34 | from .i18n import _ | |
@@ -232,7 +227,7 b' class revlogoldindex(list):' | |||||
232 |
|
227 | |||
233 | @util.propertycache |
|
228 | @util.propertycache | |
234 | def _nodemap(self): |
|
229 | def _nodemap(self): | |
235 | nodemap = nodemaputil.NodeMap({nullid: nullrev}) |
|
230 | nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev}) | |
236 | for r in range(0, len(self)): |
|
231 | for r in range(0, len(self)): | |
237 | n = self[r][7] |
|
232 | n = self[r][7] | |
238 | nodemap[n] = r |
|
233 | nodemap[n] = r | |
@@ -270,7 +265,7 b' class revlogoldindex(list):' | |||||
270 |
|
265 | |||
271 | def __getitem__(self, i): |
|
266 | def __getitem__(self, i): | |
272 | if i == -1: |
|
267 | if i == -1: | |
273 | return (0, 0, 0, -1, -1, -1, -1, nullid) |
|
268 | return (0, 0, 0, -1, -1, -1, -1, sha1nodeconstants.nullid) | |
274 | return list.__getitem__(self, i) |
|
269 | return list.__getitem__(self, i) | |
275 |
|
270 | |||
276 |
|
271 | |||
@@ -278,7 +273,7 b' class revlogoldio(object):' | |||||
278 | def parseindex(self, data, inline): |
|
273 | def parseindex(self, data, inline): | |
279 | s = INDEX_ENTRY_V0.size |
|
274 | s = INDEX_ENTRY_V0.size | |
280 | index = [] |
|
275 | index = [] | |
281 | nodemap = nodemaputil.NodeMap({nullid: nullrev}) |
|
276 | nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev}) | |
282 | n = off = 0 |
|
277 | n = off = 0 | |
283 | l = len(data) |
|
278 | l = len(data) | |
284 | while off + s <= l: |
|
279 | while off + s <= l: | |
@@ -818,7 +813,10 b' class revlog(object):' | |||||
818 | raise |
|
813 | raise | |
819 | except error.RevlogError: |
|
814 | except error.RevlogError: | |
820 | # parsers.c radix tree lookup failed |
|
815 | # parsers.c radix tree lookup failed | |
821 | if node == wdirid or node in wdirfilenodeids: |
|
816 | if ( | |
|
817 | node == self.nodeconstants.wdirid | |||
|
818 | or node in self.nodeconstants.wdirfilenodeids | |||
|
819 | ): | |||
822 | raise error.WdirUnsupported |
|
820 | raise error.WdirUnsupported | |
823 | raise error.LookupError(node, self.indexfile, _(b'no node')) |
|
821 | raise error.LookupError(node, self.indexfile, _(b'no node')) | |
824 |
|
822 | |||
@@ -909,7 +907,7 b' class revlog(object):' | |||||
909 | i = self.index |
|
907 | i = self.index | |
910 | d = i[self.rev(node)] |
|
908 | d = i[self.rev(node)] | |
911 | # inline node() to avoid function call overhead |
|
909 | # inline node() to avoid function call overhead | |
912 | if d[5] == nullid: |
|
910 | if d[5] == self.nullid: | |
913 | return i[d[6]][7], i[d[5]][7] |
|
911 | return i[d[6]][7], i[d[5]][7] | |
914 | else: |
|
912 | else: | |
915 | return i[d[5]][7], i[d[6]][7] |
|
913 | return i[d[5]][7], i[d[6]][7] | |
@@ -1027,7 +1025,7 b' class revlog(object):' | |||||
1027 | not supplied, uses all of the revlog's heads. If common is not |
|
1025 | not supplied, uses all of the revlog's heads. If common is not | |
1028 | supplied, uses nullid.""" |
|
1026 | supplied, uses nullid.""" | |
1029 | if common is None: |
|
1027 | if common is None: | |
1030 | common = [nullid] |
|
1028 | common = [self.nullid] | |
1031 | if heads is None: |
|
1029 | if heads is None: | |
1032 | heads = self.heads() |
|
1030 | heads = self.heads() | |
1033 |
|
1031 | |||
@@ -1133,7 +1131,7 b' class revlog(object):' | |||||
1133 | not supplied, uses all of the revlog's heads. If common is not |
|
1131 | not supplied, uses all of the revlog's heads. If common is not | |
1134 | supplied, uses nullid.""" |
|
1132 | supplied, uses nullid.""" | |
1135 | if common is None: |
|
1133 | if common is None: | |
1136 | common = [nullid] |
|
1134 | common = [self.nullid] | |
1137 | if heads is None: |
|
1135 | if heads is None: | |
1138 | heads = self.heads() |
|
1136 | heads = self.heads() | |
1139 |
|
1137 | |||
@@ -1171,11 +1169,15 b' class revlog(object):' | |||||
1171 | return nonodes |
|
1169 | return nonodes | |
1172 | lowestrev = min([self.rev(n) for n in roots]) |
|
1170 | lowestrev = min([self.rev(n) for n in roots]) | |
1173 | else: |
|
1171 | else: | |
1174 | roots = [nullid] # Everybody's a descendant of nullid |
|
1172 | roots = [self.nullid] # Everybody's a descendant of nullid | |
1175 | lowestrev = nullrev |
|
1173 | lowestrev = nullrev | |
1176 | if (lowestrev == nullrev) and (heads is None): |
|
1174 | if (lowestrev == nullrev) and (heads is None): | |
1177 | # We want _all_ the nodes! |
|
1175 | # We want _all_ the nodes! | |
1178 | return ([self.node(r) for r in self], [nullid], list(self.heads())) |
|
1176 | return ( | |
|
1177 | [self.node(r) for r in self], | |||
|
1178 | [self.nullid], | |||
|
1179 | list(self.heads()), | |||
|
1180 | ) | |||
1179 | if heads is None: |
|
1181 | if heads is None: | |
1180 | # All nodes are ancestors, so the latest ancestor is the last |
|
1182 | # All nodes are ancestors, so the latest ancestor is the last | |
1181 | # node. |
|
1183 | # node. | |
@@ -1201,7 +1203,7 b' class revlog(object):' | |||||
1201 | # grab a node to tag |
|
1203 | # grab a node to tag | |
1202 | n = nodestotag.pop() |
|
1204 | n = nodestotag.pop() | |
1203 | # Never tag nullid |
|
1205 | # Never tag nullid | |
1204 | if n == nullid: |
|
1206 | if n == self.nullid: | |
1205 | continue |
|
1207 | continue | |
1206 | # A node's revision number represents its place in a |
|
1208 | # A node's revision number represents its place in a | |
1207 | # topologically sorted list of nodes. |
|
1209 | # topologically sorted list of nodes. | |
@@ -1213,7 +1215,7 b' class revlog(object):' | |||||
1213 | ancestors.add(n) # Mark as ancestor |
|
1215 | ancestors.add(n) # Mark as ancestor | |
1214 | # Add non-nullid parents to list of nodes to tag. |
|
1216 | # Add non-nullid parents to list of nodes to tag. | |
1215 | nodestotag.update( |
|
1217 | nodestotag.update( | |
1216 | [p for p in self.parents(n) if p != nullid] |
|
1218 | [p for p in self.parents(n) if p != self.nullid] | |
1217 | ) |
|
1219 | ) | |
1218 | elif n in heads: # We've seen it before, is it a fake head? |
|
1220 | elif n in heads: # We've seen it before, is it a fake head? | |
1219 | # So it is, real heads should not be the ancestors of |
|
1221 | # So it is, real heads should not be the ancestors of | |
@@ -1241,7 +1243,7 b' class revlog(object):' | |||||
1241 | # We are descending from nullid, and don't need to care about |
|
1243 | # We are descending from nullid, and don't need to care about | |
1242 | # any other roots. |
|
1244 | # any other roots. | |
1243 | lowestrev = nullrev |
|
1245 | lowestrev = nullrev | |
1244 | roots = [nullid] |
|
1246 | roots = [self.nullid] | |
1245 | # Transform our roots list into a set. |
|
1247 | # Transform our roots list into a set. | |
1246 | descendants = set(roots) |
|
1248 | descendants = set(roots) | |
1247 | # Also, keep the original roots so we can filter out roots that aren't |
|
1249 | # Also, keep the original roots so we can filter out roots that aren't | |
@@ -1335,7 +1337,7 b' class revlog(object):' | |||||
1335 | """ |
|
1337 | """ | |
1336 | if start is None and stop is None: |
|
1338 | if start is None and stop is None: | |
1337 | if not len(self): |
|
1339 | if not len(self): | |
1338 | return [nullid] |
|
1340 | return [self.nullid] | |
1339 | return [self.node(r) for r in self.headrevs()] |
|
1341 | return [self.node(r) for r in self.headrevs()] | |
1340 |
|
1342 | |||
1341 | if start is None: |
|
1343 | if start is None: | |
@@ -1425,7 +1427,7 b' class revlog(object):' | |||||
1425 | if ancs: |
|
1427 | if ancs: | |
1426 | # choose a consistent winner when there's a tie |
|
1428 | # choose a consistent winner when there's a tie | |
1427 | return min(map(self.node, ancs)) |
|
1429 | return min(map(self.node, ancs)) | |
1428 | return nullid |
|
1430 | return self.nullid | |
1429 |
|
1431 | |||
1430 | def _match(self, id): |
|
1432 | def _match(self, id): | |
1431 | if isinstance(id, int): |
|
1433 | if isinstance(id, int): | |
@@ -1463,7 +1465,7 b' class revlog(object):' | |||||
1463 |
|
1465 | |||
1464 | def _partialmatch(self, id): |
|
1466 | def _partialmatch(self, id): | |
1465 | # we don't care wdirfilenodeids as they should be always full hash |
|
1467 | # we don't care wdirfilenodeids as they should be always full hash | |
1466 | maybewdir = wdirhex.startswith(id) |
|
1468 | maybewdir = self.nodeconstants.wdirhex.startswith(id) | |
1467 | try: |
|
1469 | try: | |
1468 | partial = self.index.partialmatch(id) |
|
1470 | partial = self.index.partialmatch(id) | |
1469 | if partial and self.hasnode(partial): |
|
1471 | if partial and self.hasnode(partial): | |
@@ -1499,8 +1501,8 b' class revlog(object):' | |||||
1499 | nl = [ |
|
1501 | nl = [ | |
1500 | n for n in nl if hex(n).startswith(id) and self.hasnode(n) |
|
1502 | n for n in nl if hex(n).startswith(id) and self.hasnode(n) | |
1501 | ] |
|
1503 | ] | |
1502 | if nullhex.startswith(id): |
|
1504 | if self.nodeconstants.nullhex.startswith(id): | |
1503 | nl.append(nullid) |
|
1505 | nl.append(self.nullid) | |
1504 | if len(nl) > 0: |
|
1506 | if len(nl) > 0: | |
1505 | if len(nl) == 1 and not maybewdir: |
|
1507 | if len(nl) == 1 and not maybewdir: | |
1506 | self._pcache[id] = nl[0] |
|
1508 | self._pcache[id] = nl[0] | |
@@ -1560,13 +1562,13 b' class revlog(object):' | |||||
1560 | length = max(self.index.shortest(node), minlength) |
|
1562 | length = max(self.index.shortest(node), minlength) | |
1561 | return disambiguate(hexnode, length) |
|
1563 | return disambiguate(hexnode, length) | |
1562 | except error.RevlogError: |
|
1564 | except error.RevlogError: | |
1563 | if node != wdirid: |
|
1565 | if node != self.nodeconstants.wdirid: | |
1564 | raise error.LookupError(node, self.indexfile, _(b'no node')) |
|
1566 | raise error.LookupError(node, self.indexfile, _(b'no node')) | |
1565 | except AttributeError: |
|
1567 | except AttributeError: | |
1566 | # Fall through to pure code |
|
1568 | # Fall through to pure code | |
1567 | pass |
|
1569 | pass | |
1568 |
|
1570 | |||
1569 | if node == wdirid: |
|
1571 | if node == self.nodeconstants.wdirid: | |
1570 | for length in range(minlength, len(hexnode) + 1): |
|
1572 | for length in range(minlength, len(hexnode) + 1): | |
1571 | prefix = hexnode[:length] |
|
1573 | prefix = hexnode[:length] | |
1572 | if isvalid(prefix): |
|
1574 | if isvalid(prefix): | |
@@ -1881,7 +1883,7 b' class revlog(object):' | |||||
1881 | rev = None |
|
1883 | rev = None | |
1882 |
|
1884 | |||
1883 | # fast path the special `nullid` rev |
|
1885 | # fast path the special `nullid` rev | |
1884 | if node == nullid: |
|
1886 | if node == self.nullid: | |
1885 | return b"", {} |
|
1887 | return b"", {} | |
1886 |
|
1888 | |||
1887 | # ``rawtext`` is the text as stored inside the revlog. Might be the |
|
1889 | # ``rawtext`` is the text as stored inside the revlog. Might be the | |
@@ -2302,11 +2304,14 b' class revlog(object):' | |||||
2302 | - rawtext is optional (can be None); if not set, cachedelta must be set. |
|
2304 | - rawtext is optional (can be None); if not set, cachedelta must be set. | |
2303 | if both are set, they must correspond to each other. |
|
2305 | if both are set, they must correspond to each other. | |
2304 | """ |
|
2306 | """ | |
2305 | if node == nullid: |
|
2307 | if node == self.nullid: | |
2306 | raise error.RevlogError( |
|
2308 | raise error.RevlogError( | |
2307 | _(b"%s: attempt to add null revision") % self.indexfile |
|
2309 | _(b"%s: attempt to add null revision") % self.indexfile | |
2308 | ) |
|
2310 | ) | |
2309 | if node == wdirid or node in wdirfilenodeids: |
|
2311 | if ( | |
|
2312 | node == self.nodeconstants.wdirid | |||
|
2313 | or node in self.nodeconstants.wdirfilenodeids | |||
|
2314 | ): | |||
2310 | raise error.RevlogError( |
|
2315 | raise error.RevlogError( | |
2311 | _(b"%s: attempt to add wdir revision") % self.indexfile |
|
2316 | _(b"%s: attempt to add wdir revision") % self.indexfile | |
2312 | ) |
|
2317 | ) |
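
The revlog hunks above share one shape: node identity checks go through the instance rather than module globals. A hedged sketch of the guard addrevision applies (exception types simplified to ValueError):

    def check_addable(rl, node):
        # Refuse the null node and the working-directory pseudo-nodes.
        if node == rl.nullid:
            raise ValueError('attempt to add null revision')
        if (
            node == rl.nodeconstants.wdirid
            or node in rl.nodeconstants.wdirfilenodeids
        ):
            raise ValueError('attempt to add wdir revision')
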
@@ -19,10 +19,8 b' from .i18n import _' | |||||
19 | from .node import ( |
|
19 | from .node import ( | |
20 | bin, |
|
20 | bin, | |
21 | hex, |
|
21 | hex, | |
22 | nullid, |
|
|||
23 | nullrev, |
|
22 | nullrev, | |
24 | short, |
|
23 | short, | |
25 | wdirid, |
|
|||
26 | wdirrev, |
|
24 | wdirrev, | |
27 | ) |
|
25 | ) | |
28 | from .pycompat import getattr |
|
26 | from .pycompat import getattr | |
@@ -450,7 +448,7 b' def binnode(ctx):' | |||||
450 | """Return binary node id for a given basectx""" |
|
448 | """Return binary node id for a given basectx""" | |
451 | node = ctx.node() |
|
449 | node = ctx.node() | |
452 | if node is None: |
|
450 | if node is None: | |
453 | return wdirid |
|
451 | return ctx.repo().nodeconstants.wdirid | |
454 | return node |
|
452 | return node | |
455 |
|
453 | |||
456 |
|
454 | |||
@@ -1108,7 +1106,7 b' def cleanupnodes(' | |||||
1108 | if roots: |
|
1106 | if roots: | |
1109 | newnode = roots[0].node() |
|
1107 | newnode = roots[0].node() | |
1110 | else: |
|
1108 | else: | |
1111 | newnode = nullid |
|
1109 | newnode = repo.nullid | |
1112 | else: |
|
1110 | else: | |
1113 | newnode = newnodes[0] |
|
1111 | newnode = newnodes[0] | |
1114 | moves[oldnode] = newnode |
|
1112 | moves[oldnode] = newnode | |
@@ -1506,7 +1504,7 b' def movedirstate(repo, newctx, match=Non' | |||||
1506 | oldctx = repo[b'.'] |
|
1504 | oldctx = repo[b'.'] | |
1507 | ds = repo.dirstate |
|
1505 | ds = repo.dirstate | |
1508 | copies = dict(ds.copies()) |
|
1506 | copies = dict(ds.copies()) | |
1509 | ds.setparents(newctx.node(), nullid) |
|
1507 | ds.setparents(newctx.node(), repo.nullid) | |
1510 | s = newctx.status(oldctx, match=match) |
|
1508 | s = newctx.status(oldctx, match=match) | |
1511 | for f in s.modified: |
|
1509 | for f in s.modified: | |
1512 | if ds[f] == b'r': |
|
1510 | if ds[f] == b'r': |
@@ -46,10 +46,7 b' import collections' | |||||
46 | import random |
|
46 | import random | |
47 |
|
47 | |||
48 | from .i18n import _ |
|
48 | from .i18n import _ | |
49 | from .node import ( |
|
49 | from .node import nullrev | |
50 | nullid, |
|
|||
51 | nullrev, |
|
|||
52 | ) |
|
|||
53 | from . import ( |
|
50 | from . import ( | |
54 | error, |
|
51 | error, | |
55 | policy, |
|
52 | policy, | |
@@ -391,9 +388,9 b' def findcommonheads(' | |||||
391 | audit[b'total-roundtrips'] = 1 |
|
388 | audit[b'total-roundtrips'] = 1 | |
392 |
|
389 | |||
393 | if cl.tiprev() == nullrev: |
|
390 | if cl.tiprev() == nullrev: | |
394 | if srvheadhashes != [nullid]: |
|
391 | if srvheadhashes != [cl.nullid]: | |
395 | return [nullid], True, srvheadhashes |
|
392 | return [cl.nullid], True, srvheadhashes | |
396 | return [nullid], False, [] |
|
393 | return [cl.nullid], False, [] | |
397 | else: |
|
394 | else: | |
398 | # we still need the remote head for the function return |
|
395 | # we still need the remote head for the function return | |
399 | with remote.commandexecutor() as e: |
|
396 | with remote.commandexecutor() as e: | |
@@ -406,7 +403,7 b' def findcommonheads(' | |||||
406 |
|
403 | |||
407 | knownsrvheads = [] # revnos of remote heads that are known locally |
|
404 | knownsrvheads = [] # revnos of remote heads that are known locally | |
408 | for node in srvheadhashes: |
|
405 | for node in srvheadhashes: | |
409 | if node == nullid: |
|
406 | if node == cl.nullid: | |
410 | continue |
|
407 | continue | |
411 |
|
408 | |||
412 | try: |
|
409 | try: | |
@@ -503,17 +500,17 b' def findcommonheads(' | |||||
503 | if audit is not None: |
|
500 | if audit is not None: | |
504 | audit[b'total-roundtrips'] = roundtrips |
|
501 | audit[b'total-roundtrips'] = roundtrips | |
505 |
|
502 | |||
506 | if not result and srvheadhashes != [nullid]: |
|
503 | if not result and srvheadhashes != [cl.nullid]: | |
507 | if abortwhenunrelated: |
|
504 | if abortwhenunrelated: | |
508 | raise error.Abort(_(b"repository is unrelated")) |
|
505 | raise error.Abort(_(b"repository is unrelated")) | |
509 | else: |
|
506 | else: | |
510 | ui.warn(_(b"warning: repository is unrelated\n")) |
|
507 | ui.warn(_(b"warning: repository is unrelated\n")) | |
511 | return ( |
|
508 | return ( | |
512 | {nullid}, |
|
509 | {cl.nullid}, | |
513 | True, |
|
510 | True, | |
514 | srvheadhashes, |
|
511 | srvheadhashes, | |
515 | ) |
|
512 | ) | |
516 |
|
513 | |||
517 | anyincoming = srvheadhashes != [nullid] |
|
514 | anyincoming = srvheadhashes != [cl.nullid] | |
518 | result = {clnode(r) for r in result} |
|
515 | result = {clnode(r) for r in result} | |
519 | return result, anyincoming, srvheadhashes |
|
516 | return result, anyincoming, srvheadhashes |
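
Discovery compares head lists against the changelog's null node throughout; a sketch of the empty-repo fast path above (return shape: common, anyincoming, remoteheads):

    def empty_repo_result(cl, srvheadhashes):
        # An empty local repo shares only the null node with the server;
        # any other remote head means there are incoming changesets.
        if srvheadhashes != [cl.nullid]:
            return [cl.nullid], True, srvheadhashes
        return [cl.nullid], False, []
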
@@ -31,7 +31,6 b' from .i18n import _' | |||||
31 | from .node import ( |
|
31 | from .node import ( | |
32 | bin, |
|
32 | bin, | |
33 | hex, |
|
33 | hex, | |
34 | nullid, |
|
|||
35 | nullrev, |
|
34 | nullrev, | |
36 | ) |
|
35 | ) | |
37 | from . import ( |
|
36 | from . import ( | |
@@ -822,7 +821,7 b' def unshelvecontinue(ui, repo, state, op' | |||||
822 | pendingctx = state.pendingctx |
|
821 | pendingctx = state.pendingctx | |
823 |
|
822 | |||
824 | with repo.dirstate.parentchange(): |
|
823 | with repo.dirstate.parentchange(): | |
825 | repo.setparents(state.pendingctx.node(), nullid) |
|
824 | repo.setparents(state.pendingctx.node(), repo.nullid) | |
826 | repo.dirstate.write(repo.currenttransaction()) |
|
825 | repo.dirstate.write(repo.currenttransaction()) | |
827 |
|
826 | |||
828 | targetphase = phases.internal |
|
827 | targetphase = phases.internal | |
@@ -831,7 +830,7 b' def unshelvecontinue(ui, repo, state, op' | |||||
831 | overrides = {(b'phases', b'new-commit'): targetphase} |
|
830 | overrides = {(b'phases', b'new-commit'): targetphase} | |
832 | with repo.ui.configoverride(overrides, b'unshelve'): |
|
831 | with repo.ui.configoverride(overrides, b'unshelve'): | |
833 | with repo.dirstate.parentchange(): |
|
832 | with repo.dirstate.parentchange(): | |
834 | repo.setparents(state.parents[0], nullid) |
|
833 | repo.setparents(state.parents[0], repo.nullid) | |
835 | newnode, ispartialunshelve = _createunshelvectx( |
|
834 | newnode, ispartialunshelve = _createunshelvectx( | |
836 | ui, repo, shelvectx, basename, interactive, opts |
|
835 | ui, repo, shelvectx, basename, interactive, opts | |
837 | ) |
|
836 | ) | |
@@ -1027,7 +1026,7 b' def _rebaserestoredcommit(' | |||||
1027 | raise error.ConflictResolutionRequired(b'unshelve') |
|
1026 | raise error.ConflictResolutionRequired(b'unshelve') | |
1028 |
|
1027 | |||
1029 | with repo.dirstate.parentchange(): |
|
1028 | with repo.dirstate.parentchange(): | |
1030 | repo.setparents(tmpwctx.node(), nullid) |
|
1029 | repo.setparents(tmpwctx.node(), repo.nullid) | |
1031 | newnode, ispartialunshelve = _createunshelvectx( |
|
1030 | newnode, ispartialunshelve = _createunshelvectx( | |
1032 | ui, repo, shelvectx, basename, interactive, opts |
|
1031 | ui, repo, shelvectx, basename, interactive, opts | |
1033 | ) |
|
1032 | ) |
@@ -10,10 +10,7 b' from __future__ import absolute_import' | |||||
10 | import os |
|
10 | import os | |
11 |
|
11 | |||
12 | from .i18n import _ |
|
12 | from .i18n import _ | |
13 | from .node import ( |
|
13 | from .node import hex | |
14 | hex, |
|
|||
15 | nullid, |
|
|||
16 | ) |
|
|||
17 | from . import ( |
|
14 | from . import ( | |
18 | error, |
|
15 | error, | |
19 | match as matchmod, |
|
16 | match as matchmod, | |
@@ -177,7 +174,7 b' def activeconfig(repo):' | |||||
177 | revs = [ |
|
174 | revs = [ | |
178 | repo.changelog.rev(node) |
|
175 | repo.changelog.rev(node) | |
179 | for node in repo.dirstate.parents() |
|
176 | for node in repo.dirstate.parents() | |
180 | if node != nullid |
|
177 | if node != repo.nullid | |
181 | ] |
|
178 | ] | |
182 |
|
179 | |||
183 | allincludes = set() |
|
180 | allincludes = set() | |
@@ -321,7 +318,7 b' def matcher(repo, revs=None, includetemp' | |||||
321 | revs = [ |
|
318 | revs = [ | |
322 | repo.changelog.rev(node) |
|
319 | repo.changelog.rev(node) | |
323 | for node in repo.dirstate.parents() |
|
320 | for node in repo.dirstate.parents() | |
324 | if node != nullid |
|
321 | if node != repo.nullid | |
325 | ] |
|
322 | ] | |
326 |
|
323 | |||
327 | signature = configsignature(repo, includetemp=includetemp) |
|
324 | signature = configsignature(repo, includetemp=includetemp) |
@@ -2,7 +2,6 b' from __future__ import absolute_import' | |||||
2 |
|
2 | |||
3 | from .i18n import _ |
|
3 | from .i18n import _ | |
4 | from .pycompat import getattr |
|
4 | from .pycompat import getattr | |
5 | from .node import nullid |
|
|||
6 | from . import ( |
|
5 | from . import ( | |
7 | bookmarks as bookmarksmod, |
|
6 | bookmarks as bookmarksmod, | |
8 | cmdutil, |
|
7 | cmdutil, | |
@@ -39,7 +38,7 b' def _findupdatetarget(repo, nodes):' | |||||
39 |
|
38 | |||
40 | if ( |
|
39 | if ( | |
41 | util.safehasattr(repo, b'mq') |
|
40 | util.safehasattr(repo, b'mq') | |
42 | and p2 != nullid |
|
41 | and p2 != repo.nullid | |
43 | and p2 in [x.node for x in repo.mq.applied] |
|
42 | and p2 in [x.node for x in repo.mq.applied] | |
44 | ): |
|
43 | ): | |
45 | unode = p2 |
|
44 | unode = p2 | |
@@ -218,7 +217,7 b' def debugstrip(ui, repo, *revs, **opts):' | |||||
218 | # if one of the wdir parent is stripped we'll need |
|
217 | # if one of the wdir parent is stripped we'll need | |
219 | # to update away to an earlier revision |
|
218 | # to update away to an earlier revision | |
220 | update = any( |
|
219 | update = any( | |
221 | p != nullid and cl.rev(p) in strippedrevs |
|
220 | p != repo.nullid and cl.rev(p) in strippedrevs | |
222 | for p in repo.dirstate.parents() |
|
221 | for p in repo.dirstate.parents() | |
223 | ) |
|
222 | ) | |
224 |
|
223 |
@@ -21,7 +21,6 b' from .i18n import _' | |||||
21 | from .node import ( |
|
21 | from .node import ( | |
22 | bin, |
|
22 | bin, | |
23 | hex, |
|
23 | hex, | |
24 | nullid, |
|
|||
25 | short, |
|
24 | short, | |
26 | ) |
|
25 | ) | |
27 | from . import ( |
|
26 | from . import ( | |
@@ -686,7 +685,7 b' class hgsubrepo(abstractsubrepo):' | |||||
686 | # we can't fully delete the repository as it may contain |
|
685 | # we can't fully delete the repository as it may contain | |
687 | # local-only history |
|
686 | # local-only history | |
688 | self.ui.note(_(b'removing subrepo %s\n') % subrelpath(self)) |
|
687 | self.ui.note(_(b'removing subrepo %s\n') % subrelpath(self)) | |
689 | hg.clean(self._repo, nullid, False) |
|
688 | hg.clean(self._repo, self._repo.nullid, False) | |
690 |
|
689 | |||
691 | def _get(self, state): |
|
690 | def _get(self, state): | |
692 | source, revision, kind = state |
|
691 | source, revision, kind = state |
@@ -74,9 +74,6 b'' | |||||
74 | from __future__ import absolute_import |
|
74 | from __future__ import absolute_import | |
75 |
|
75 | |||
76 | from .i18n import _ |
|
76 | from .i18n import _ | |
77 | from .node import ( |
|
|||
78 | nullhex, |
|
|||
79 | ) |
|
|||
80 | from . import ( |
|
77 | from . import ( | |
81 | tags as tagsmod, |
|
78 | tags as tagsmod, | |
82 | util, |
|
79 | util, | |
@@ -243,8 +240,8 b' def merge(repo, fcd, fco, fca):' | |||||
243 | pnlosttagset = basetagset - pntagset |
|
240 | pnlosttagset = basetagset - pntagset | |
244 | for t in pnlosttagset: |
|
241 | for t in pnlosttagset: | |
245 | pntags[t] = basetags[t] |
|
242 | pntags[t] = basetags[t] | |
246 | if pntags[t][-1][0] != nullhex: |
|
243 | if pntags[t][-1][0] != repo.nodeconstants.nullhex: | |
247 | pntags[t].append([nullhex, None]) |
|
244 | pntags[t].append([repo.nodeconstants.nullhex, None]) | |
248 |
|
245 | |||
249 | conflictedtags = [] # for reporting purposes |
|
246 | conflictedtags = [] # for reporting purposes | |
250 | mergedtags = util.sortdict(p1tags) |
|
247 | mergedtags = util.sortdict(p1tags) |
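
Tag histories use the null hex id as a deletion tombstone, which is why the merge above appends one when a tag vanished on one parent; a hedged sketch (helper name hypothetical):

    def mark_deleted(repo, taghistory):
        # Append a [nullhex, None] tombstone unless one is already present.
        nullhex = repo.nodeconstants.nullhex
        if taghistory[-1][0] != nullhex:
            taghistory.append([nullhex, None])
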
@@ -18,7 +18,6 b' import io' | |||||
18 | from .node import ( |
|
18 | from .node import ( | |
19 | bin, |
|
19 | bin, | |
20 | hex, |
|
20 | hex, | |
21 | nullid, |
|
|||
22 | nullrev, |
|
21 | nullrev, | |
23 | short, |
|
22 | short, | |
24 | ) |
|
23 | ) | |
@@ -96,12 +95,12 b' def fnoderevs(ui, repo, revs):' | |||||
96 | return fnodes |
|
95 | return fnodes | |
97 |
|
96 | |||
98 |
|
97 | |||
99 | def _nulltonone(value): |
|
98 | def _nulltonone(repo, value): | |
100 | """convert nullid to None |
|
99 | """convert nullid to None | |
101 |
|
100 | |||
102 | For tag value, nullid means "deleted". This small utility function helps |
|
101 | For tag value, nullid means "deleted". This small utility function helps | |
103 | translating that to None.""" |
|
102 | translating that to None.""" | |
104 | if value == nullid: |
|
103 | if value == repo.nullid: | |
105 | return None |
|
104 | return None | |
106 | return value |
|
105 | return value | |
107 |
|
106 | |||
@@ -123,14 +122,14 b' def difftags(ui, repo, oldfnodes, newfno' | |||||
123 | # list of (tag, old, new): None means missing |
|
122 | # list of (tag, old, new): None means missing | |
124 | entries = [] |
|
123 | entries = [] | |
125 | for tag, (new, __) in newtags.items(): |
|
124 | for tag, (new, __) in newtags.items(): | |
126 | new = _nulltonone(new) |
|
125 | new = _nulltonone(repo, new) | |
127 | old, __ = oldtags.pop(tag, (None, None)) |
|
126 | old, __ = oldtags.pop(tag, (None, None)) | |
128 | old = _nulltonone(old) |
|
127 | old = _nulltonone(repo, old) | |
129 | if old != new: |
|
128 | if old != new: | |
130 | entries.append((tag, old, new)) |
|
129 | entries.append((tag, old, new)) | |
131 | # handle deleted tags |
|
130 | # handle deleted tags | |
132 | for tag, (old, __) in oldtags.items(): |
|
131 | for tag, (old, __) in oldtags.items(): | |
133 | old = _nulltonone(old) |
|
132 | old = _nulltonone(repo, old) | |
134 | if old is not None: |
|
133 | if old is not None: | |
135 | entries.append((tag, old, None)) |
|
134 | entries.append((tag, old, None)) | |
136 | entries.sort() |
|
135 | entries.sort() | |
@@ -452,7 +451,7 b' def _readtagcache(ui, repo):' | |||||
452 | repoheads = repo.heads() |
|
451 | repoheads = repo.heads() | |
453 | # Case 2 (uncommon): empty repo; get out quickly and don't bother |
|
452 | # Case 2 (uncommon): empty repo; get out quickly and don't bother | |
454 | # writing an empty cache. |
|
453 | # writing an empty cache. | |
455 | if repoheads == [nullid]: |
|
454 | if repoheads == [repo.nullid]: | |
456 | return ([], {}, valid, {}, False) |
|
455 | return ([], {}, valid, {}, False) | |
457 |
|
456 | |||
458 | # Case 3 (uncommon): cache file missing or empty. |
|
457 | # Case 3 (uncommon): cache file missing or empty. | |
@@ -499,7 +498,7 b' def _getfnodes(ui, repo, nodes):' | |||||
499 | for node in nodes: |
|
498 | for node in nodes: | |
500 | fnode = fnodescache.getfnode(node) |
|
499 | fnode = fnodescache.getfnode(node) | |
501 | flog = repo.file(b'.hgtags') |
|
500 | flog = repo.file(b'.hgtags') | |
502 | if fnode != nullid: |
|
501 | if fnode != repo.nullid: | |
503 | if fnode not in validated_fnodes: |
|
502 | if fnode not in validated_fnodes: | |
504 | if flog.hasnode(fnode): |
|
503 | if flog.hasnode(fnode): | |
505 | validated_fnodes.add(fnode) |
|
504 | validated_fnodes.add(fnode) | |
@@ -510,7 +509,7 b' def _getfnodes(ui, repo, nodes):' | |||||
510 | if unknown_entries: |
|
509 | if unknown_entries: | |
511 | fixed_nodemap = fnodescache.refresh_invalid_nodes(unknown_entries) |
|
510 | fixed_nodemap = fnodescache.refresh_invalid_nodes(unknown_entries) | |
512 | for node, fnode in pycompat.iteritems(fixed_nodemap): |
|
511 | for node, fnode in pycompat.iteritems(fixed_nodemap): | |
513 | if fnode != nullid: |
|
512 | if fnode != repo.nullid: | |
514 | cachefnode[node] = fnode |
|
513 | cachefnode[node] = fnode | |
515 |
|
514 | |||
516 | fnodescache.write() |
|
515 | fnodescache.write() | |
@@ -632,7 +631,7 b' def _tag(' | |||||
632 | m = name |
|
631 | m = name | |
633 |
|
632 | |||
634 | if repo._tagscache.tagtypes and name in repo._tagscache.tagtypes: |
|
633 | if repo._tagscache.tagtypes and name in repo._tagscache.tagtypes: | |
635 | old = repo.tags().get(name, nullid) |
|
634 | old = repo.tags().get(name, repo.nullid) | |
636 | fp.write(b'%s %s\n' % (hex(old), m)) |
|
635 | fp.write(b'%s %s\n' % (hex(old), m)) | |
637 | fp.write(b'%s %s\n' % (hex(node), m)) |
|
636 | fp.write(b'%s %s\n' % (hex(node), m)) | |
638 | fp.close() |
|
637 | fp.close() | |
@@ -762,8 +761,8 b' class hgtagsfnodescache(object):' | |||||
762 | If an .hgtags does not exist at the specified revision, nullid is |
|
761 | If an .hgtags does not exist at the specified revision, nullid is | |
763 | returned. |
|
762 | returned. | |
764 | """ |
|
763 | """ | |
765 | if node == nullid: |
|
764 | if node == self._repo.nullid: | |
766 | return nullid |
|
765 | return node | |
767 |
|
766 | |||
768 | ctx = self._repo[node] |
|
767 | ctx = self._repo[node] | |
769 | rev = ctx.rev() |
|
768 | rev = ctx.rev() | |
@@ -826,7 +825,7 b' class hgtagsfnodescache(object):' | |||||
826 | fnode = ctx.filenode(b'.hgtags') |
|
825 | fnode = ctx.filenode(b'.hgtags') | |
827 | except error.LookupError: |
|
826 | except error.LookupError: | |
828 | # No .hgtags file on this revision. |
|
827 | # No .hgtags file on this revision. | |
829 | fnode = nullid |
|
828 | fnode = self._repo.nullid | |
830 | return fnode |
|
829 | return fnode | |
831 |
|
830 | |||
832 | def setfnode(self, node, fnode): |
|
831 | def setfnode(self, node, fnode): |
@@ -10,10 +10,7 b' from __future__ import absolute_import' | |||||
10 | import re |
|
10 | import re | |
11 |
|
11 | |||
12 | from .i18n import _ |
|
12 | from .i18n import _ | |
13 | from .node import ( |
|
13 | from .node import bin | |
14 | bin, |
|
|||
15 | wdirid, |
|
|||
16 | ) |
|
|||
17 | from . import ( |
|
14 | from . import ( | |
18 | color, |
|
15 | color, | |
19 | dagop, |
|
16 | dagop, | |
@@ -778,7 +775,7 b' def shortest(context, mapping, args):' | |||||
778 | try: |
|
775 | try: | |
779 | node = scmutil.resolvehexnodeidprefix(repo, hexnode) |
|
776 | node = scmutil.resolvehexnodeidprefix(repo, hexnode) | |
780 | except error.WdirUnsupported: |
|
777 | except error.WdirUnsupported: | |
781 | node = wdirid |
|
778 | node = repo.nodeconstants.wdirid | |
782 | except error.LookupError: |
|
779 | except error.LookupError: | |
783 | return hexnode |
|
780 | return hexnode | |
784 | if not node: |
|
781 | if not node: |
@@ -10,8 +10,6 b' from __future__ import absolute_import' | |||||
10 | from .i18n import _ |
|
10 | from .i18n import _ | |
11 | from .node import ( |
|
11 | from .node import ( | |
12 | hex, |
|
12 | hex, | |
13 | nullid, |
|
|||
14 | wdirid, |
|
|||
15 | wdirrev, |
|
13 | wdirrev, | |
16 | ) |
|
14 | ) | |
17 |
|
15 | |||
@@ -412,7 +410,7 b' def getgraphnode(repo, ctx, cache):' | |||||
412 |
|
410 | |||
413 | def getgraphnodecurrent(repo, ctx, cache): |
|
411 | def getgraphnodecurrent(repo, ctx, cache): | |
414 | wpnodes = repo.dirstate.parents() |
|
412 | wpnodes = repo.dirstate.parents() | |
415 | if wpnodes[1] == nullid: |
|
413 | if wpnodes[1] == repo.nullid: | |
416 | wpnodes = wpnodes[:1] |
|
414 | wpnodes = wpnodes[:1] | |
417 | if ctx.node() in wpnodes: |
|
415 | if ctx.node() in wpnodes: | |
418 | return b'@' |
|
416 | return b'@' | |
@@ -525,11 +523,12 b' def showmanifest(context, mapping):' | |||||
525 | ctx = context.resource(mapping, b'ctx') |
|
523 | ctx = context.resource(mapping, b'ctx') | |
526 | mnode = ctx.manifestnode() |
|
524 | mnode = ctx.manifestnode() | |
527 | if mnode is None: |
|
525 | if mnode is None: | |
528 | mnode = wdirid |
|
526 | mnode = repo.nodeconstants.wdirid | |
529 | mrev = wdirrev |
|
527 | mrev = wdirrev | |
|
528 | mhex = repo.nodeconstants.wdirhex | |||
530 | else: |
|
529 | else: | |
531 | mrev = repo.manifestlog.rev(mnode) |
|
530 | mrev = repo.manifestlog.rev(mnode) | |
532 | mhex = hex(mnode) |
|
531 | mhex = hex(mnode) | |
533 | mapping = context.overlaymap(mapping, {b'rev': mrev, b'node': mhex}) |
|
532 | mapping = context.overlaymap(mapping, {b'rev': mrev, b'node': mhex}) | |
534 | f = context.process(b'manifest', mapping) |
|
533 | f = context.process(b'manifest', mapping) | |
535 | return templateutil.hybriditem( |
|
534 | return templateutil.hybriditem( |
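
For the working directory there is no manifest node yet, so the branch above falls back to the wdir pseudo-identifiers; a sketch of the resolved triple (helper hypothetical):

    from mercurial.node import hex, wdirrev

    def manifest_triple(repo, ctx):
        mnode = ctx.manifestnode()
        if mnode is None:
            # Uncommitted working directory: use the wdir pseudo-node.
            nc = repo.nodeconstants
            return nc.wdirid, wdirrev, nc.wdirhex
        return mnode, repo.manifestlog.rev(mnode), hex(mnode)
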
@@ -11,7 +11,6 b' import unittest' | |||||
11 |
|
11 | |||
12 | from ..node import ( |
|
12 | from ..node import ( | |
13 | hex, |
|
13 | hex, | |
14 | nullid, |
|
|||
15 | nullrev, |
|
14 | nullrev, | |
16 | ) |
|
15 | ) | |
17 | from ..pycompat import getattr |
|
16 | from ..pycompat import getattr | |
@@ -51,7 +50,7 b' class ifileindextests(basetestcase):' | |||||
51 | self.assertFalse(f.hasnode(None)) |
|
50 | self.assertFalse(f.hasnode(None)) | |
52 | self.assertFalse(f.hasnode(0)) |
|
51 | self.assertFalse(f.hasnode(0)) | |
53 | self.assertFalse(f.hasnode(nullrev)) |
|
52 | self.assertFalse(f.hasnode(nullrev)) | |
54 | self.assertFalse(f.hasnode(nullid)) |
|
53 | self.assertFalse(f.hasnode(f.nullid)) | |
55 | self.assertFalse(f.hasnode(b'0')) |
|
54 | self.assertFalse(f.hasnode(b'0')) | |
56 | self.assertFalse(f.hasnode(b'a' * 20)) |
|
55 | self.assertFalse(f.hasnode(b'a' * 20)) | |
57 |
|
56 | |||
@@ -64,8 +63,8 b' class ifileindextests(basetestcase):' | |||||
64 |
|
63 | |||
65 | self.assertEqual(list(f.revs(start=20)), []) |
|
64 | self.assertEqual(list(f.revs(start=20)), []) | |
66 |
|
65 | |||
67 | # parents() and parentrevs() work with nullid/nullrev. |
|
66 | # parents() and parentrevs() work with f.nullid/nullrev. | |
68 | self.assertEqual(f.parents(nullid), (nullid, nullid)) |
|
67 | self.assertEqual(f.parents(f.nullid), (f.nullid, f.nullid)) | |
69 | self.assertEqual(f.parentrevs(nullrev), (nullrev, nullrev)) |
|
68 | self.assertEqual(f.parentrevs(nullrev), (nullrev, nullrev)) | |
70 |
|
69 | |||
71 | with self.assertRaises(error.LookupError): |
|
70 | with self.assertRaises(error.LookupError): | |
@@ -78,9 +77,9 b' class ifileindextests(basetestcase):' | |||||
78 | with self.assertRaises(IndexError): |
|
77 | with self.assertRaises(IndexError): | |
79 | f.parentrevs(i) |
|
78 | f.parentrevs(i) | |
80 |
|
79 | |||
81 | # nullid/nullrev lookup always works. |
|
80 | # f.nullid/nullrev lookup always works. | |
82 | self.assertEqual(f.rev(nullid), nullrev) |
|
81 | self.assertEqual(f.rev(f.nullid), nullrev) | |
83 | self.assertEqual(f.node(nullrev), nullid) |
|
82 | self.assertEqual(f.node(nullrev), f.nullid) | |
84 |
|
83 | |||
85 | with self.assertRaises(error.LookupError): |
|
84 | with self.assertRaises(error.LookupError): | |
86 | f.rev(b'\x01' * 20) |
|
85 | f.rev(b'\x01' * 20) | |
@@ -92,16 +91,16 b' class ifileindextests(basetestcase):' | |||||
92 | with self.assertRaises(IndexError): |
|
91 | with self.assertRaises(IndexError): | |
93 | f.node(i) |
|
92 | f.node(i) | |
94 |
|
93 | |||
95 | self.assertEqual(f.lookup(nullid), nullid) |
|
94 | self.assertEqual(f.lookup(f.nullid), f.nullid) | |
96 | self.assertEqual(f.lookup(nullrev), nullid) |
|
95 | self.assertEqual(f.lookup(nullrev), f.nullid) | |
97 | self.assertEqual(f.lookup(hex(nullid)), nullid) |
|
96 | self.assertEqual(f.lookup(hex(f.nullid)), f.nullid) | |
98 | self.assertEqual(f.lookup(b'%d' % nullrev), nullid) |
|
97 | self.assertEqual(f.lookup(b'%d' % nullrev), f.nullid) | |
99 |
|
98 | |||
100 | with self.assertRaises(error.LookupError): |
|
99 | with self.assertRaises(error.LookupError): | |
101 | f.lookup(b'badvalue') |
|
100 | f.lookup(b'badvalue') | |
102 |
|
101 | |||
103 | with self.assertRaises(error.LookupError): |
|
102 | with self.assertRaises(error.LookupError): | |
104 | f.lookup(hex(nullid)[0:12]) |
|
103 | f.lookup(hex(f.nullid)[0:12]) | |
105 |
|
104 | |||
106 | with self.assertRaises(error.LookupError): |
|
105 | with self.assertRaises(error.LookupError): | |
107 | f.lookup(b'-2') |
|
106 | f.lookup(b'-2') | |
@@ -140,19 +139,19 b' class ifileindextests(basetestcase):' | |||||
140 | with self.assertRaises(IndexError): |
|
139 | with self.assertRaises(IndexError): | |
141 | f.iscensored(i) |
|
140 | f.iscensored(i) | |
142 |
|
141 | |||
143 | self.assertEqual(list(f.commonancestorsheads(nullid, nullid)), []) |
|
142 | self.assertEqual(list(f.commonancestorsheads(f.nullid, f.nullid)), []) | |
144 |
|
143 | |||
145 | with self.assertRaises(ValueError): |
|
144 | with self.assertRaises(ValueError): | |
146 | self.assertEqual(list(f.descendants([])), []) |
|
145 | self.assertEqual(list(f.descendants([])), []) | |
147 |
|
146 | |||
148 | self.assertEqual(list(f.descendants([nullrev])), []) |
|
147 | self.assertEqual(list(f.descendants([nullrev])), []) | |
149 |
|
148 | |||
150 | self.assertEqual(f.heads(), [nullid]) |
|
149 | self.assertEqual(f.heads(), [f.nullid]) | |
151 | self.assertEqual(f.heads(nullid), [nullid]) |
|
150 | self.assertEqual(f.heads(f.nullid), [f.nullid]) | |
152 | self.assertEqual(f.heads(None, [nullid]), [nullid]) |
|
151 | self.assertEqual(f.heads(None, [f.nullid]), [f.nullid]) | |
153 | self.assertEqual(f.heads(nullid, [nullid]), [nullid]) |
|
152 | self.assertEqual(f.heads(f.nullid, [f.nullid]), [f.nullid]) | |
154 |
|
153 | |||
155 | self.assertEqual(f.children(nullid), []) |
|
154 | self.assertEqual(f.children(f.nullid), []) | |
156 |
|
155 | |||
157 | with self.assertRaises(error.LookupError): |
|
156 | with self.assertRaises(error.LookupError): | |
158 | f.children(b'\x01' * 20) |
|
157 | f.children(b'\x01' * 20) | |
@@ -160,7 +159,7 b' class ifileindextests(basetestcase):' | |||||
160 | def testsinglerevision(self): |
|
159 | def testsinglerevision(self): | |
161 | f = self._makefilefn() |
|
160 | f = self._makefilefn() | |
162 | with self._maketransactionfn() as tr: |
|
161 | with self._maketransactionfn() as tr: | |
163 | node = f.add(b'initial', None, tr, 0, nullid, nullid) |
|
162 | node = f.add(b'initial', None, tr, 0, f.nullid, f.nullid) | |
164 |
|
163 | |||
165 | self.assertEqual(len(f), 1) |
|
164 | self.assertEqual(len(f), 1) | |
166 | self.assertEqual(list(f), [0]) |
|
165 | self.assertEqual(list(f), [0]) | |
@@ -174,7 +173,7 b' class ifileindextests(basetestcase):' | |||||
174 | self.assertTrue(f.hasnode(node)) |
|
173 | self.assertTrue(f.hasnode(node)) | |
175 | self.assertFalse(f.hasnode(hex(node))) |
|
174 | self.assertFalse(f.hasnode(hex(node))) | |
176 | self.assertFalse(f.hasnode(nullrev)) |
|
175 | self.assertFalse(f.hasnode(nullrev)) | |
177 | self.assertFalse(f.hasnode(nullid)) |
|
176 | self.assertFalse(f.hasnode(f.nullid)) | |
178 | self.assertFalse(f.hasnode(node[0:12])) |
|
177 | self.assertFalse(f.hasnode(node[0:12])) | |
179 | self.assertFalse(f.hasnode(hex(node)[0:20])) |
|
178 | self.assertFalse(f.hasnode(hex(node)[0:20])) | |
180 |
|
179 | |||
@@ -188,7 +187,7 b' class ifileindextests(basetestcase):' | |||||
188 | self.assertEqual(list(f.revs(1, 0)), [1, 0]) |
|
187 | self.assertEqual(list(f.revs(1, 0)), [1, 0]) | |
189 | self.assertEqual(list(f.revs(2, 0)), [2, 1, 0]) |
|
188 | self.assertEqual(list(f.revs(2, 0)), [2, 1, 0]) | |
190 |
|
189 | |||
191 | self.assertEqual(f.parents(node), (nullid, nullid)) |
|
190 | self.assertEqual(f.parents(node), (f.nullid, f.nullid)) | |
192 | self.assertEqual(f.parentrevs(0), (nullrev, nullrev)) |
|
191 | self.assertEqual(f.parentrevs(0), (nullrev, nullrev)) | |
193 |
|
192 | |||
194 | with self.assertRaises(error.LookupError): |
|
193 | with self.assertRaises(error.LookupError): | |
@@ -209,7 +208,7 b' class ifileindextests(basetestcase):' | |||||
209 |
|
208 | |||
210 | self.assertEqual(f.lookup(node), node) |
|
209 | self.assertEqual(f.lookup(node), node) | |
211 | self.assertEqual(f.lookup(0), node) |
|
210 | self.assertEqual(f.lookup(0), node) | |
212 | self.assertEqual(f.lookup(-1), nullid) |
|
211 | self.assertEqual(f.lookup(-1), f.nullid) | |
213 | self.assertEqual(f.lookup(b'0'), node) |
|
212 | self.assertEqual(f.lookup(b'0'), node) | |
214 | self.assertEqual(f.lookup(hex(node)), node) |
|
213 | self.assertEqual(f.lookup(hex(node)), node) | |
215 |
|
214 | |||
@@ -256,9 +255,9 b' class ifileindextests(basetestcase):' | |||||
256 |
|
255 | |||
257 | f = self._makefilefn() |
|
256 | f = self._makefilefn() | |
258 | with self._maketransactionfn() as tr: |
|
257 | with self._maketransactionfn() as tr: | |
259 | node0 = f.add(fulltext0, None, tr, 0, nullid, nullid) |
|
258 | node0 = f.add(fulltext0, None, tr, 0, f.nullid, f.nullid) | |
260 | node1 = f.add(fulltext1, None, tr, 1, node0, nullid) |
|
259 | node1 = f.add(fulltext1, None, tr, 1, node0, f.nullid) | |
261 | node2 = f.add(fulltext2, None, tr, 3, node1, nullid) |
|
260 | node2 = f.add(fulltext2, None, tr, 3, node1, f.nullid) | |
262 |
|
261 | |||
263 | self.assertEqual(len(f), 3) |
|
262 | self.assertEqual(len(f), 3) | |
264 | self.assertEqual(list(f), [0, 1, 2]) |
|
263 | self.assertEqual(list(f), [0, 1, 2]) | |
@@ -284,9 +283,9 b' class ifileindextests(basetestcase):' | |||||
284 | # TODO this is wrong |
|
283 | # TODO this is wrong | |
285 | self.assertEqual(list(f.revs(3, 2)), [3, 2]) |
|
284 | self.assertEqual(list(f.revs(3, 2)), [3, 2]) | |
286 |
|
285 | |||
287 | self.assertEqual(f.parents(node0), (nullid, nullid)) |
|
286 | self.assertEqual(f.parents(node0), (f.nullid, f.nullid)) | |
288 | self.assertEqual(f.parents(node1), (node0, nullid)) |
|
287 | self.assertEqual(f.parents(node1), (node0, f.nullid)) | |
289 | self.assertEqual(f.parents(node2), (node1, nullid)) |
|
288 | self.assertEqual(f.parents(node2), (node1, f.nullid)) | |
290 |
|
289 | |||
291 | self.assertEqual(f.parentrevs(0), (nullrev, nullrev)) |
|
290 | self.assertEqual(f.parentrevs(0), (nullrev, nullrev)) | |
292 | self.assertEqual(f.parentrevs(1), (0, nullrev)) |
|
291 | self.assertEqual(f.parentrevs(1), (0, nullrev)) | |
@@ -330,7 +329,7 b' class ifileindextests(basetestcase):' | |||||
330 | with self.assertRaises(IndexError): |
|
329 | with self.assertRaises(IndexError): | |
331 | f.iscensored(3) |
|
330 | f.iscensored(3) | |
332 |
|
331 | |||
333 | self.assertEqual(f.commonancestorsheads(node1, nullid), []) |
|
332 | self.assertEqual(f.commonancestorsheads(node1, f.nullid), []) | |
334 | self.assertEqual(f.commonancestorsheads(node1, node0), [node0]) |
|
333 | self.assertEqual(f.commonancestorsheads(node1, node0), [node0]) | |
335 | self.assertEqual(f.commonancestorsheads(node1, node1), [node1]) |
|
334 | self.assertEqual(f.commonancestorsheads(node1, node1), [node1]) | |
336 | self.assertEqual(f.commonancestorsheads(node0, node1), [node0]) |
|
335 | self.assertEqual(f.commonancestorsheads(node0, node1), [node0]) | |
@@ -364,12 +363,12 b' class ifileindextests(basetestcase):' | |||||
364 | f = self._makefilefn() |
|
363 | f = self._makefilefn() | |
365 |
|
364 | |||
366 | with self._maketransactionfn() as tr: |
|
365 | with self._maketransactionfn() as tr: | |
367 | node0 = f.add(b'0', None, tr, 0, nullid, nullid) |
|
366 | node0 = f.add(b'0', None, tr, 0, f.nullid, f.nullid) | |
368 | node1 = f.add(b'1', None, tr, 1, node0, nullid) |
|
367 | node1 = f.add(b'1', None, tr, 1, node0, f.nullid) | |
369 | node2 = f.add(b'2', None, tr, 2, node1, nullid) |
|
368 | node2 = f.add(b'2', None, tr, 2, node1, f.nullid) | |
370 | node3 = f.add(b'3', None, tr, 3, node0, nullid) |
|
369 | node3 = f.add(b'3', None, tr, 3, node0, f.nullid) | |
371 | node4 = f.add(b'4', None, tr, 4, node3, nullid) |
|
370 | node4 = f.add(b'4', None, tr, 4, node3, f.nullid) | |
372 | node5 = f.add(b'5', None, tr, 5, node0, nullid) |
|
371 | node5 = f.add(b'5', None, tr, 5, node0, f.nullid) | |
373 |
|
372 | |||
374 | self.assertEqual(len(f), 6) |
|
373 | self.assertEqual(len(f), 6) | |
375 |
|
374 | |||
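
The storage interface tests above always spell parents through the store under test; a minimal sketch of that convention (f and the transaction factory are whatever the harness provides):

    with maketransactionfn() as tr:  # hypothetical harness hook
        node0 = f.add(b'initial', None, tr, 0, f.nullid, f.nullid)
        node1 = f.add(b'next', None, tr, 1, node0, f.nullid)
        assert f.parents(node1) == (node0, f.nullid)
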
@@ -427,24 +426,24 b' class ifiledatatests(basetestcase):' | |||||
427 | with self.assertRaises(IndexError): |
|
426 | with self.assertRaises(IndexError): | |
428 | f.size(i) |
|
427 | f.size(i) | |
429 |
|
428 | |||
430 | self.assertEqual(f.revision(nullid), b'') |
|
429 | self.assertEqual(f.revision(f.nullid), b'') | |
431 | self.assertEqual(f.rawdata(nullid), b'') |
|
430 | self.assertEqual(f.rawdata(f.nullid), b'') | |
432 |
|
431 | |||
433 | with self.assertRaises(error.LookupError): |
|
432 | with self.assertRaises(error.LookupError): | |
434 | f.revision(b'\x01' * 20) |
|
433 | f.revision(b'\x01' * 20) | |
435 |
|
434 | |||
436 | self.assertEqual(f.read(nullid), b'') |
|
435 | self.assertEqual(f.read(f.nullid), b'') | |
437 |
|
436 | |||
438 | with self.assertRaises(error.LookupError): |
|
437 | with self.assertRaises(error.LookupError): | |
439 | f.read(b'\x01' * 20) |
|
438 | f.read(b'\x01' * 20) | |
440 |
|
439 | |||
441 | self.assertFalse(f.renamed(nullid)) |
|
440 | self.assertFalse(f.renamed(f.nullid)) | |
442 |
|
441 | |||
443 | with self.assertRaises(error.LookupError): |
|
442 | with self.assertRaises(error.LookupError): | |
444 | f.read(b'\x01' * 20) |
|
443 | f.read(b'\x01' * 20) | |
445 |
|
444 | |||
446 | self.assertTrue(f.cmp(nullid, b'')) |
|
445 | self.assertTrue(f.cmp(f.nullid, b'')) | |
447 | self.assertTrue(f.cmp(nullid, b'foo')) |
|
446 | self.assertTrue(f.cmp(f.nullid, b'foo')) | |
448 |
|
447 | |||
449 | with self.assertRaises(error.LookupError): |
|
448 | with self.assertRaises(error.LookupError): | |
450 | f.cmp(b'\x01' * 20, b'irrelevant') |
|
449 | f.cmp(b'\x01' * 20, b'irrelevant') | |
@@ -455,7 +454,7 b' class ifiledatatests(basetestcase):' | |||||
455 | next(gen) |
|
454 | next(gen) | |
456 |
|
455 | |||
457 | # Emitting null node yields nothing. |
|
456 | # Emitting null node yields nothing. | |
458 | gen = f.emitrevisions([nullid]) |
|
457 | gen = f.emitrevisions([f.nullid]) | |
459 | with self.assertRaises(StopIteration): |
|
458 | with self.assertRaises(StopIteration): | |
460 | next(gen) |
|
459 | next(gen) | |
461 |
|
460 | |||
@@ -468,7 +467,7 b' class ifiledatatests(basetestcase):' | |||||
468 |
|
467 | |||
469 | f = self._makefilefn() |
|
468 | f = self._makefilefn() | |
470 | with self._maketransactionfn() as tr: |
|
469 | with self._maketransactionfn() as tr: | |
471 | node = f.add(fulltext, None, tr, 0, nullid, nullid) |
|
470 | node = f.add(fulltext, None, tr, 0, f.nullid, f.nullid) | |
472 |
|
471 | |||
473 | self.assertEqual(f.storageinfo(), {}) |
|
472 | self.assertEqual(f.storageinfo(), {}) | |
474 | self.assertEqual( |
|
473 | self.assertEqual( | |
@@ -496,10 +495,10 b' class ifiledatatests(basetestcase):' | |||||
496 | rev = next(gen) |
|
495 | rev = next(gen) | |
497 |
|
496 | |||
498 | self.assertEqual(rev.node, node) |
|
497 | self.assertEqual(rev.node, node) | |
499 | self.assertEqual(rev.p1node, nullid) |
|
498 | self.assertEqual(rev.p1node, f.nullid) | |
500 | self.assertEqual(rev.p2node, nullid) |
|
499 | self.assertEqual(rev.p2node, f.nullid) | |
501 | self.assertIsNone(rev.linknode) |
|
500 | self.assertIsNone(rev.linknode) | |
502 | self.assertEqual(rev.basenode, nullid) |
|
501 | self.assertEqual(rev.basenode, f.nullid) | |
503 | self.assertIsNone(rev.baserevisionsize) |
|
502 | self.assertIsNone(rev.baserevisionsize) | |
504 | self.assertIsNone(rev.revision) |
|
503 | self.assertIsNone(rev.revision) | |
505 | self.assertIsNone(rev.delta) |
|
504 | self.assertIsNone(rev.delta) | |
@@ -512,10 +511,10 b' class ifiledatatests(basetestcase):' | |||||
512 | rev = next(gen) |
|
511 | rev = next(gen) | |
513 |
|
512 | |||
514 | self.assertEqual(rev.node, node) |
|
513 | self.assertEqual(rev.node, node) | |
515 | self.assertEqual(rev.p1node, nullid) |
|
514 | self.assertEqual(rev.p1node, f.nullid) | |
516 | self.assertEqual(rev.p2node, nullid) |
|
515 | self.assertEqual(rev.p2node, f.nullid) | |
517 | self.assertIsNone(rev.linknode) |
|
516 | self.assertIsNone(rev.linknode) | |
518 | self.assertEqual(rev.basenode, nullid) |
|
517 | self.assertEqual(rev.basenode, f.nullid) | |
519 | self.assertIsNone(rev.baserevisionsize) |
|
518 | self.assertIsNone(rev.baserevisionsize) | |
520 | self.assertEqual(rev.revision, fulltext) |
|
519 | self.assertEqual(rev.revision, fulltext) | |
521 | self.assertIsNone(rev.delta) |
|
520 | self.assertIsNone(rev.delta) | |
@@ -534,9 +533,9 b' class ifiledatatests(basetestcase):' | |||||
534 |
|
533 | |||
535 | f = self._makefilefn() |
|
534 | f = self._makefilefn() | |
536 | with self._maketransactionfn() as tr: |
|
535 | with self._maketransactionfn() as tr: | |
537 | node0 = f.add(fulltext0, None, tr, 0, nullid, nullid) |
|
536 | node0 = f.add(fulltext0, None, tr, 0, f.nullid, f.nullid) | |
538 | node1 = f.add(fulltext1, None, tr, 1, node0, nullid) |
|
537 | node1 = f.add(fulltext1, None, tr, 1, node0, f.nullid) | |
539 | node2 = f.add(fulltext2, None, tr, 3, node1, nullid) |
|
538 | node2 = f.add(fulltext2, None, tr, 3, node1, f.nullid) | |
540 |
|
539 | |||
541 | self.assertEqual(f.storageinfo(), {}) |
|
540 | self.assertEqual(f.storageinfo(), {}) | |
542 | self.assertEqual( |
|
541 | self.assertEqual( | |
@@ -596,10 +595,10 b' class ifiledatatests(basetestcase):' | |||||
596 | rev = next(gen) |
|
595 | rev = next(gen) | |
597 |
|
596 | |||
598 | self.assertEqual(rev.node, node0) |
|
597 | self.assertEqual(rev.node, node0) | |
599 | self.assertEqual(rev.p1node, nullid) |
|
598 | self.assertEqual(rev.p1node, f.nullid) | |
600 | self.assertEqual(rev.p2node, nullid) |
|
599 | self.assertEqual(rev.p2node, f.nullid) | |
601 | self.assertIsNone(rev.linknode) |
|
600 | self.assertIsNone(rev.linknode) | |
602 | self.assertEqual(rev.basenode, nullid) |
|
601 | self.assertEqual(rev.basenode, f.nullid) | |
603 | self.assertIsNone(rev.baserevisionsize) |
|
602 | self.assertIsNone(rev.baserevisionsize) | |
604 | self.assertEqual(rev.revision, fulltext0) |
|
603 | self.assertEqual(rev.revision, fulltext0) | |
605 | self.assertIsNone(rev.delta) |
|
604 | self.assertIsNone(rev.delta) | |
@@ -608,7 +607,7 b' class ifiledatatests(basetestcase):' | |||||
608 |
|
607 | |||
609 | self.assertEqual(rev.node, node1) |
|
608 | self.assertEqual(rev.node, node1) | |
610 | self.assertEqual(rev.p1node, node0) |
|
609 | self.assertEqual(rev.p1node, node0) | |
611 | self.assertEqual(rev.p2node, nullid) |
|
610 | self.assertEqual(rev.p2node, f.nullid) | |
612 | self.assertIsNone(rev.linknode) |
|
611 | self.assertIsNone(rev.linknode) | |
613 | self.assertEqual(rev.basenode, node0) |
|
612 | self.assertEqual(rev.basenode, node0) | |
614 | self.assertIsNone(rev.baserevisionsize) |
|
613 | self.assertIsNone(rev.baserevisionsize) | |
@@ -622,7 +621,7 b' class ifiledatatests(basetestcase):' | |||||
622 |
|
621 | |||
623 | self.assertEqual(rev.node, node2) |
|
622 | self.assertEqual(rev.node, node2) | |
624 | self.assertEqual(rev.p1node, node1) |
|
623 | self.assertEqual(rev.p1node, node1) | |
625 | self.assertEqual(rev.p2node, nullid) |
|
624 | self.assertEqual(rev.p2node, f.nullid) | |
626 | self.assertIsNone(rev.linknode) |
|
625 | self.assertIsNone(rev.linknode) | |
627 | self.assertEqual(rev.basenode, node1) |
|
626 | self.assertEqual(rev.basenode, node1) | |
628 | self.assertIsNone(rev.baserevisionsize) |
|
627 | self.assertIsNone(rev.baserevisionsize) | |
@@ -641,10 +640,10 b' class ifiledatatests(basetestcase):' | |||||
641 | rev = next(gen) |
|
640 | rev = next(gen) | |
642 |
|
641 | |||
643 | self.assertEqual(rev.node, node0) |
|
642 | self.assertEqual(rev.node, node0) | |
644 | self.assertEqual(rev.p1node, nullid) |
|
643 | self.assertEqual(rev.p1node, f.nullid) | |
645 | self.assertEqual(rev.p2node, nullid) |
|
644 | self.assertEqual(rev.p2node, f.nullid) | |
646 | self.assertIsNone(rev.linknode) |
|
645 | self.assertIsNone(rev.linknode) | |
647 | self.assertEqual(rev.basenode, nullid) |
|
646 | self.assertEqual(rev.basenode, f.nullid) | |
648 | self.assertIsNone(rev.baserevisionsize) |
|
647 | self.assertIsNone(rev.baserevisionsize) | |
649 | self.assertEqual(rev.revision, fulltext0) |
|
648 | self.assertEqual(rev.revision, fulltext0) | |
650 | self.assertIsNone(rev.delta) |
|
649 | self.assertIsNone(rev.delta) | |
@@ -653,7 +652,7 b' class ifiledatatests(basetestcase):' | |||||
653 |
|
652 | |||
654 | self.assertEqual(rev.node, node1) |
|
653 | self.assertEqual(rev.node, node1) | |
655 | self.assertEqual(rev.p1node, node0) |
|
654 | self.assertEqual(rev.p1node, node0) | |
656 | self.assertEqual(rev.p2node, nullid) |
|
655 | self.assertEqual(rev.p2node, f.nullid) | |
657 | self.assertIsNone(rev.linknode) |
|
656 | self.assertIsNone(rev.linknode) | |
658 | self.assertEqual(rev.basenode, node0) |
|
657 | self.assertEqual(rev.basenode, node0) | |
659 | self.assertIsNone(rev.baserevisionsize) |
|
658 | self.assertIsNone(rev.baserevisionsize) | |
@@ -667,7 +666,7 b' class ifiledatatests(basetestcase):' | |||||
667 |
|
666 | |||
668 | self.assertEqual(rev.node, node2) |
|
667 | self.assertEqual(rev.node, node2) | |
669 | self.assertEqual(rev.p1node, node1) |
|
668 | self.assertEqual(rev.p1node, node1) | |
670 | self.assertEqual(rev.p2node, nullid) |
|
669 | self.assertEqual(rev.p2node, f.nullid) | |
671 | self.assertIsNone(rev.linknode) |
|
670 | self.assertIsNone(rev.linknode) | |
672 | self.assertEqual(rev.basenode, node1) |
|
671 | self.assertEqual(rev.basenode, node1) | |
673 | self.assertIsNone(rev.baserevisionsize) |
|
672 | self.assertIsNone(rev.baserevisionsize) | |
@@ -700,16 +699,16 b' class ifiledatatests(basetestcase):' | |||||
700 | rev = next(gen) |
|
699 | rev = next(gen) | |
701 | self.assertEqual(rev.node, node2) |
|
700 | self.assertEqual(rev.node, node2) | |
702 | self.assertEqual(rev.p1node, node1) |
|
701 | self.assertEqual(rev.p1node, node1) | |
703 | self.assertEqual(rev.p2node, nullid) |
|
702 | self.assertEqual(rev.p2node, f.nullid) | |
704 | self.assertEqual(rev.basenode, nullid) |
|
703 | self.assertEqual(rev.basenode, f.nullid) | |
705 | self.assertIsNone(rev.baserevisionsize) |
|
704 | self.assertIsNone(rev.baserevisionsize) | |
706 | self.assertEqual(rev.revision, fulltext2) |
|
705 | self.assertEqual(rev.revision, fulltext2) | |
707 | self.assertIsNone(rev.delta) |
|
706 | self.assertIsNone(rev.delta) | |
708 |
|
707 | |||
709 | rev = next(gen) |
|
708 | rev = next(gen) | |
710 | self.assertEqual(rev.node, node0) |
|
709 | self.assertEqual(rev.node, node0) | |
711 | self.assertEqual(rev.p1node, nullid) |
|
710 | self.assertEqual(rev.p1node, f.nullid) | |
712 | self.assertEqual(rev.p2node, nullid) |
|
711 | self.assertEqual(rev.p2node, f.nullid) | |
713 | # Delta behavior is storage dependent, so we can't easily test it. |
|
712 | # Delta behavior is storage dependent, so we can't easily test it. | |
714 |
|
713 | |||
715 | with self.assertRaises(StopIteration): |
|
714 | with self.assertRaises(StopIteration): | |
@@ -722,8 +721,8 b' class ifiledatatests(basetestcase):' | |||||
722 | rev = next(gen) |
|
721 | rev = next(gen) | |
723 | self.assertEqual(rev.node, node1) |
|
722 | self.assertEqual(rev.node, node1) | |
724 | self.assertEqual(rev.p1node, node0) |
|
723 | self.assertEqual(rev.p1node, node0) | |
725 | self.assertEqual(rev.p2node, nullid) |
|
724 | self.assertEqual(rev.p2node, f.nullid) | |
726 | self.assertEqual(rev.basenode, nullid) |
|
725 | self.assertEqual(rev.basenode, f.nullid) | |
727 | self.assertIsNone(rev.baserevisionsize) |
|
726 | self.assertIsNone(rev.baserevisionsize) | |
728 | self.assertEqual(rev.revision, fulltext1) |
|
727 | self.assertEqual(rev.revision, fulltext1) | |
729 | self.assertIsNone(rev.delta) |
|
728 | self.assertIsNone(rev.delta) | |
@@ -731,7 +730,7 b' class ifiledatatests(basetestcase):' | |||||
731 | rev = next(gen) |
|
730 | rev = next(gen) | |
732 | self.assertEqual(rev.node, node2) |
|
731 | self.assertEqual(rev.node, node2) | |
733 | self.assertEqual(rev.p1node, node1) |
|
732 | self.assertEqual(rev.p1node, node1) | |
734 | self.assertEqual(rev.p2node, nullid) |
|
733 | self.assertEqual(rev.p2node, f.nullid) | |
735 | self.assertEqual(rev.basenode, node1) |
|
734 | self.assertEqual(rev.basenode, node1) | |
736 | self.assertIsNone(rev.baserevisionsize) |
|
735 | self.assertIsNone(rev.baserevisionsize) | |
737 | self.assertIsNone(rev.revision) |
|
736 | self.assertIsNone(rev.revision) | |
@@ -751,7 +750,7 b' class ifiledatatests(basetestcase):' | |||||
751 | rev = next(gen) |
|
750 | rev = next(gen) | |
752 | self.assertEqual(rev.node, node1) |
|
751 | self.assertEqual(rev.node, node1) | |
753 | self.assertEqual(rev.p1node, node0) |
|
752 | self.assertEqual(rev.p1node, node0) | |
754 | self.assertEqual(rev.p2node, nullid) |
|
753 | self.assertEqual(rev.p2node, f.nullid) | |
755 | self.assertEqual(rev.basenode, node0) |
|
754 | self.assertEqual(rev.basenode, node0) | |
756 | self.assertIsNone(rev.baserevisionsize) |
|
755 | self.assertIsNone(rev.baserevisionsize) | |
757 | self.assertIsNone(rev.revision) |
|
756 | self.assertIsNone(rev.revision) | |
@@ -768,9 +767,9 b' class ifiledatatests(basetestcase):' | |||||
768 |
|
767 | |||
769 | rev = next(gen) |
|
768 | rev = next(gen) | |
770 | self.assertEqual(rev.node, node0) |
|
769 | self.assertEqual(rev.node, node0) | |
771 | self.assertEqual(rev.p1node, nullid) |
|
770 | self.assertEqual(rev.p1node, f.nullid) | |
772 | self.assertEqual(rev.p2node, nullid) |
|
771 | self.assertEqual(rev.p2node, f.nullid) | |
773 | self.assertEqual(rev.basenode, nullid) |
|
772 | self.assertEqual(rev.basenode, f.nullid) | |
774 | self.assertIsNone(rev.baserevisionsize) |
|
773 | self.assertIsNone(rev.baserevisionsize) | |
775 | self.assertIsNone(rev.revision) |
|
774 | self.assertIsNone(rev.revision) | |
776 | self.assertEqual( |
|
775 | self.assertEqual( | |
@@ -789,9 +788,9 b' class ifiledatatests(basetestcase):' | |||||
789 |
|
788 | |||
790 | rev = next(gen) |
|
789 | rev = next(gen) | |
791 | self.assertEqual(rev.node, node0) |
|
790 | self.assertEqual(rev.node, node0) | |
792 | self.assertEqual(rev.p1node, nullid) |
|
791 | self.assertEqual(rev.p1node, f.nullid) | |
793 | self.assertEqual(rev.p2node, nullid) |
|
792 | self.assertEqual(rev.p2node, f.nullid) | |
794 | self.assertEqual(rev.basenode, nullid) |
|
793 | self.assertEqual(rev.basenode, f.nullid) | |
795 | self.assertIsNone(rev.baserevisionsize) |
|
794 | self.assertIsNone(rev.baserevisionsize) | |
796 | self.assertIsNone(rev.revision) |
|
795 | self.assertIsNone(rev.revision) | |
797 | self.assertEqual( |
|
796 | self.assertEqual( | |
@@ -802,7 +801,7 b' class ifiledatatests(basetestcase):' | |||||
802 | rev = next(gen) |
|
801 | rev = next(gen) | |
803 | self.assertEqual(rev.node, node2) |
|
802 | self.assertEqual(rev.node, node2) | |
804 | self.assertEqual(rev.p1node, node1) |
|
803 | self.assertEqual(rev.p1node, node1) | |
805 | self.assertEqual(rev.p2node, nullid) |
|
804 | self.assertEqual(rev.p2node, f.nullid) | |
806 | self.assertEqual(rev.basenode, node0) |
|
805 | self.assertEqual(rev.basenode, node0) | |
807 |
|
806 | |||
808 | with self.assertRaises(StopIteration): |
|
807 | with self.assertRaises(StopIteration): | |
@@ -841,11 +840,11 b' class ifiledatatests(basetestcase):' | |||||
841 |
|
840 | |||
842 | f = self._makefilefn() |
|
841 | f = self._makefilefn() | |
843 | with self._maketransactionfn() as tr: |
|
842 | with self._maketransactionfn() as tr: | |
844 | node0 = f.add(fulltext0, None, tr, 0, nullid, nullid) |
|
843 | node0 = f.add(fulltext0, None, tr, 0, f.nullid, f.nullid) | |
845 | node1 = f.add(fulltext1, meta1, tr, 1, node0, nullid) |
|
844 | node1 = f.add(fulltext1, meta1, tr, 1, node0, f.nullid) | |
846 | node2 = f.add(fulltext2, meta2, tr, 2, nullid, nullid) |
|
845 | node2 = f.add(fulltext2, meta2, tr, 2, f.nullid, f.nullid) | |
847 |
|
846 | |||
848 | # Metadata header isn't recognized when parent isn't nullid. |
|
847 | # Metadata header isn't recognized when parent isn't f.nullid. | |
849 | self.assertEqual(f.size(1), len(stored1)) |
|
848 | self.assertEqual(f.size(1), len(stored1)) | |
850 | self.assertEqual(f.size(2), len(fulltext2)) |
|
849 | self.assertEqual(f.size(2), len(fulltext2)) | |
851 |
|
850 | |||
@@ -886,8 +885,8 b' class ifiledatatests(basetestcase):' | |||||
886 |
|
885 | |||
887 | f = self._makefilefn() |
|
886 | f = self._makefilefn() | |
888 | with self._maketransactionfn() as tr: |
|
887 | with self._maketransactionfn() as tr: | |
889 | node0 = f.add(fulltext0, {}, tr, 0, nullid, nullid) |
|
888 | node0 = f.add(fulltext0, {}, tr, 0, f.nullid, f.nullid) | |
890 | node1 = f.add(fulltext1, meta1, tr, 1, nullid, nullid) |
|
889 | node1 = f.add(fulltext1, meta1, tr, 1, f.nullid, f.nullid) | |
891 |
|
890 | |||
892 | # TODO this is buggy. |
|
891 | # TODO this is buggy. | |
893 | self.assertEqual(f.size(0), len(fulltext0) + 4) |
|
892 | self.assertEqual(f.size(0), len(fulltext0) + 4) | |
@@ -916,15 +915,15 b' class ifiledatatests(basetestcase):' | |||||
916 | fulltext1 = fulltext0 + b'bar\n' |
|
915 | fulltext1 = fulltext0 + b'bar\n' | |
917 |
|
916 | |||
918 | with self._maketransactionfn() as tr: |
|
917 | with self._maketransactionfn() as tr: | |
919 | node0 = f.add(fulltext0, None, tr, 0, nullid, nullid) |
|
918 | node0 = f.add(fulltext0, None, tr, 0, f.nullid, f.nullid) | |
920 | node1 = b'\xaa' * 20 |
|
919 | node1 = b'\xaa' * 20 | |
921 |
|
920 | |||
922 | self._addrawrevisionfn( |
|
921 | self._addrawrevisionfn( | |
923 | f, tr, node1, node0, nullid, 1, rawtext=fulltext1 |
|
922 | f, tr, node1, node0, f.nullid, 1, rawtext=fulltext1 | |
924 | ) |
|
923 | ) | |
925 |
|
924 | |||
926 | self.assertEqual(len(f), 2) |
|
925 | self.assertEqual(len(f), 2) | |
927 | self.assertEqual(f.parents(node1), (node0, nullid)) |
|
926 | self.assertEqual(f.parents(node1), (node0, f.nullid)) | |
928 |
|
927 | |||
929 | # revision() raises since it performs hash verification. |
|
928 | # revision() raises since it performs hash verification. | |
930 | with self.assertRaises(error.StorageError): |
|
929 | with self.assertRaises(error.StorageError): | |
@@ -951,11 +950,11 b' class ifiledatatests(basetestcase):' | |||||
951 | fulltext1 = fulltext0 + b'bar\n' |
|
950 | fulltext1 = fulltext0 + b'bar\n' | |
952 |
|
951 | |||
953 | with self._maketransactionfn() as tr: |
|
952 | with self._maketransactionfn() as tr: | |
954 | node0 = f.add(fulltext0, None, tr, 0, nullid, nullid) |
|
953 | node0 = f.add(fulltext0, None, tr, 0, f.nullid, f.nullid) | |
955 | node1 = b'\xaa' * 20 |
|
954 | node1 = b'\xaa' * 20 | |
956 |
|
955 | |||
957 | self._addrawrevisionfn( |
|
956 | self._addrawrevisionfn( | |
958 | f, tr, node1, node0, nullid, 1, rawtext=fulltext1 |
|
957 | f, tr, node1, node0, f.nullid, 1, rawtext=fulltext1 | |
959 | ) |
|
958 | ) | |
960 |
|
959 | |||
961 | with self.assertRaises(error.StorageError): |
|
960 | with self.assertRaises(error.StorageError): | |
@@ -973,11 +972,11 b' class ifiledatatests(basetestcase):' | |||||
973 | fulltext1 = fulltext0 + b'bar\n' |
|
972 | fulltext1 = fulltext0 + b'bar\n' | |
974 |
|
973 | |||
975 | with self._maketransactionfn() as tr: |
|
974 | with self._maketransactionfn() as tr: | |
976 | node0 = f.add(fulltext0, None, tr, 0, nullid, nullid) |
|
975 | node0 = f.add(fulltext0, None, tr, 0, f.nullid, f.nullid) | |
977 | node1 = b'\xaa' * 20 |
|
976 | node1 = b'\xaa' * 20 | |
978 |
|
977 | |||
979 | self._addrawrevisionfn( |
|
978 | self._addrawrevisionfn( | |
980 | f, tr, node1, node0, nullid, 1, rawtext=fulltext1 |
|
979 | f, tr, node1, node0, f.nullid, 1, rawtext=fulltext1 | |
981 | ) |
|
980 | ) | |
982 |
|
981 | |||
983 | with self.assertRaises(error.StorageError): |
|
982 | with self.assertRaises(error.StorageError): | |
@@ -994,22 +993,22 b' class ifiledatatests(basetestcase):' | |||||
994 | fulltext2 = fulltext1 + b'baz\n' |
|
993 | fulltext2 = fulltext1 + b'baz\n' | |
995 |
|
994 | |||
996 | with self._maketransactionfn() as tr: |
|
995 | with self._maketransactionfn() as tr: | |
997 | node0 = f.add(fulltext0, None, tr, 0, nullid, nullid) |
|
996 | node0 = f.add(fulltext0, None, tr, 0, f.nullid, f.nullid) | |
998 | node1 = b'\xaa' * 20 |
|
997 | node1 = b'\xaa' * 20 | |
999 |
|
998 | |||
1000 | self._addrawrevisionfn( |
|
999 | self._addrawrevisionfn( | |
1001 | f, tr, node1, node0, nullid, 1, rawtext=fulltext1 |
|
1000 | f, tr, node1, node0, f.nullid, 1, rawtext=fulltext1 | |
1002 | ) |
|
1001 | ) | |
1003 |
|
1002 | |||
1004 | with self.assertRaises(error.StorageError): |
|
1003 | with self.assertRaises(error.StorageError): | |
1005 | f.read(node1) |
|
1004 | f.read(node1) | |
1006 |
|
1005 | |||
1007 | node2 = storageutil.hashrevisionsha1(fulltext2, node1, nullid) |
|
1006 | node2 = storageutil.hashrevisionsha1(fulltext2, node1, f.nullid) | |
1008 |
|
1007 | |||
1009 | with self._maketransactionfn() as tr: |
|
1008 | with self._maketransactionfn() as tr: | |
1010 | delta = mdiff.textdiff(fulltext1, fulltext2) |
|
1009 | delta = mdiff.textdiff(fulltext1, fulltext2) | |
1011 | self._addrawrevisionfn( |
|
1010 | self._addrawrevisionfn( | |
1012 | f, tr, node2, node1, nullid, 2, delta=(1, delta) |
|
1011 | f, tr, node2, node1, f.nullid, 2, delta=(1, delta) | |
1013 | ) |
|
1012 | ) | |
1014 |
|
1013 | |||
1015 | self.assertEqual(len(f), 3) |
|
1014 | self.assertEqual(len(f), 3) | |
@@ -1029,13 +1028,13 b' class ifiledatatests(basetestcase):' | |||||
1029 | ) |
|
1028 | ) | |
1030 |
|
1029 | |||
1031 | with self._maketransactionfn() as tr: |
|
1030 | with self._maketransactionfn() as tr: | |
1032 | node0 = f.add(b'foo', None, tr, 0, nullid, nullid) |
|
1031 | node0 = f.add(b'foo', None, tr, 0, f.nullid, f.nullid) | |
1033 |
|
1032 | |||
1034 | # The node value doesn't matter since we can't verify it. |
|
1033 | # The node value doesn't matter since we can't verify it. | |
1035 | node1 = b'\xbb' * 20 |
|
1034 | node1 = b'\xbb' * 20 | |
1036 |
|
1035 | |||
1037 | self._addrawrevisionfn( |
|
1036 | self._addrawrevisionfn( | |
1038 | f, tr, node1, node0, nullid, 1, stored1, censored=True |
|
1037 | f, tr, node1, node0, f.nullid, 1, stored1, censored=True | |
1039 | ) |
|
1038 | ) | |
1040 |
|
1039 | |||
1041 | self.assertTrue(f.iscensored(1)) |
|
1040 | self.assertTrue(f.iscensored(1)) | |
@@ -1063,13 +1062,13 b' class ifiledatatests(basetestcase):' | |||||
1063 | ) |
|
1062 | ) | |
1064 |
|
1063 | |||
1065 | with self._maketransactionfn() as tr: |
|
1064 | with self._maketransactionfn() as tr: | |
1066 | node0 = f.add(b'foo', None, tr, 0, nullid, nullid) |
|
1065 | node0 = f.add(b'foo', None, tr, 0, f.nullid, f.nullid) | |
1067 |
|
1066 | |||
1068 | # The node value doesn't matter since we can't verify it. |
|
1067 | # The node value doesn't matter since we can't verify it. | |
1069 | node1 = b'\xbb' * 20 |
|
1068 | node1 = b'\xbb' * 20 | |
1070 |
|
1069 | |||
1071 | self._addrawrevisionfn( |
|
1070 | self._addrawrevisionfn( | |
1072 | f, tr, node1, node0, nullid, 1, stored1, censored=True |
|
1071 | f, tr, node1, node0, f.nullid, 1, stored1, censored=True | |
1073 | ) |
|
1072 | ) | |
1074 |
|
1073 | |||
1075 | with self.assertRaises(error.CensoredNodeError): |
|
1074 | with self.assertRaises(error.CensoredNodeError): | |
@@ -1088,10 +1087,10 b' class ifilemutationtests(basetestcase):' | |||||
1088 | def testaddnoop(self): |
|
1087 | def testaddnoop(self): | |
1089 | f = self._makefilefn() |
|
1088 | f = self._makefilefn() | |
1090 | with self._maketransactionfn() as tr: |
|
1089 | with self._maketransactionfn() as tr: | |
1091 | node0 = f.add(b'foo', None, tr, 0, nullid, nullid) |
|
1090 | node0 = f.add(b'foo', None, tr, 0, f.nullid, f.nullid) | |
1092 | node1 = f.add(b'foo', None, tr, 0, nullid, nullid) |
|
1091 | node1 = f.add(b'foo', None, tr, 0, f.nullid, f.nullid) | |
1093 | # Varying by linkrev shouldn't impact hash. |
|
1092 | # Varying by linkrev shouldn't impact hash. | |
1094 | node2 = f.add(b'foo', None, tr, 1, nullid, nullid) |
|
1093 | node2 = f.add(b'foo', None, tr, 1, f.nullid, f.nullid) | |
1095 |
|
1094 | |||
1096 | self.assertEqual(node1, node0) |
|
1095 | self.assertEqual(node1, node0) | |
1097 | self.assertEqual(node2, node0) |
|
1096 | self.assertEqual(node2, node0) | |
@@ -1102,7 +1101,9 b' class ifilemutationtests(basetestcase):' | |||||
1102 | with self._maketransactionfn() as tr: |
|
1101 | with self._maketransactionfn() as tr: | |
1103 | # Adding a revision with bad node value fails. |
|
1102 | # Adding a revision with bad node value fails. | |
1104 | with self.assertRaises(error.StorageError): |
|
1103 | with self.assertRaises(error.StorageError): | |
1105 | f.addrevision(b'foo', tr, 0, nullid, nullid, node=b'\x01' * 20) |
|
1104 | f.addrevision( | |
|
1105 | b'foo', tr, 0, f.nullid, f.nullid, node=b'\x01' * 20 | |||
|
1106 | ) | |||
1106 |
|
1107 | |||
1107 | def testaddrevisionunknownflag(self): |
|
1108 | def testaddrevisionunknownflag(self): | |
1108 | f = self._makefilefn() |
|
1109 | f = self._makefilefn() | |
@@ -1113,7 +1114,7 b' class ifilemutationtests(basetestcase):' | |||||
1113 | break |
|
1114 | break | |
1114 |
|
1115 | |||
1115 | with self.assertRaises(error.StorageError): |
|
1116 | with self.assertRaises(error.StorageError): | |
1116 | f.addrevision(b'foo', tr, 0, nullid, nullid, flags=flags) |
|
1117 | f.addrevision(b'foo', tr, 0, f.nullid, f.nullid, flags=flags) | |
1117 |
|
1118 | |||
1118 | def testaddgroupsimple(self): |
|
1119 | def testaddgroupsimple(self): | |
1119 | f = self._makefilefn() |
|
1120 | f = self._makefilefn() | |
@@ -1153,12 +1154,12 b' class ifilemutationtests(basetestcase):' | |||||
1153 | delta0 = mdiff.trivialdiffheader(len(fulltext0)) + fulltext0 |
|
1154 | delta0 = mdiff.trivialdiffheader(len(fulltext0)) + fulltext0 | |
1154 |
|
1155 | |||
1155 | with self._maketransactionfn() as tr: |
|
1156 | with self._maketransactionfn() as tr: | |
1156 | node0 = f.add(fulltext0, None, tr, 0, nullid, nullid) |
|
1157 | node0 = f.add(fulltext0, None, tr, 0, f.nullid, f.nullid) | |
1157 |
|
1158 | |||
1158 | f = self._makefilefn() |
|
1159 | f = self._makefilefn() | |
1159 |
|
1160 | |||
1160 | deltas = [ |
|
1161 | deltas = [ | |
1161 | (node0, nullid, nullid, nullid, nullid, delta0, 0, {}), |
|
1162 | (node0, f.nullid, f.nullid, f.nullid, f.nullid, delta0, 0, {}), | |
1162 | ] |
|
1163 | ] | |
1163 |
|
1164 | |||
1164 | with self._maketransactionfn() as tr: |
|
1165 | with self._maketransactionfn() as tr: | |
@@ -1207,7 +1208,7 b' class ifilemutationtests(basetestcase):' | |||||
1207 | nodes = [] |
|
1208 | nodes = [] | |
1208 | with self._maketransactionfn() as tr: |
|
1209 | with self._maketransactionfn() as tr: | |
1209 | for fulltext in fulltexts: |
|
1210 | for fulltext in fulltexts: | |
1210 | nodes.append(f.add(fulltext, None, tr, 0, nullid, nullid)) |
|
1211 | nodes.append(f.add(fulltext, None, tr, 0, f.nullid, f.nullid)) | |
1211 |
|
1212 | |||
1212 | f = self._makefilefn() |
|
1213 | f = self._makefilefn() | |
1213 | deltas = [] |
|
1214 | deltas = [] | |
@@ -1215,7 +1216,7 b' class ifilemutationtests(basetestcase):' | |||||
1215 | delta = mdiff.trivialdiffheader(len(fulltext)) + fulltext |
|
1216 | delta = mdiff.trivialdiffheader(len(fulltext)) + fulltext | |
1216 |
|
1217 | |||
1217 | deltas.append( |
|
1218 | deltas.append( | |
1218 | (nodes[i], nullid, nullid, nullid, nullid, delta, 0, {}) |
|
1219 | (nodes[i], f.nullid, f.nullid, f.nullid, f.nullid, delta, 0, {}) | |
1219 | ) |
|
1220 | ) | |
1220 |
|
1221 | |||
1221 | with self._maketransactionfn() as tr: |
|
1222 | with self._maketransactionfn() as tr: | |
@@ -1254,18 +1255,18 b' class ifilemutationtests(basetestcase):' | |||||
1254 | ) |
|
1255 | ) | |
1255 |
|
1256 | |||
1256 | with self._maketransactionfn() as tr: |
|
1257 | with self._maketransactionfn() as tr: | |
1257 | node0 = f.add(b'foo\n' * 30, None, tr, 0, nullid, nullid) |
|
1258 | node0 = f.add(b'foo\n' * 30, None, tr, 0, f.nullid, f.nullid) | |
1258 |
|
1259 | |||
1259 | # The node value doesn't matter since we can't verify it. |
|
1260 | # The node value doesn't matter since we can't verify it. | |
1260 | node1 = b'\xbb' * 20 |
|
1261 | node1 = b'\xbb' * 20 | |
1261 |
|
1262 | |||
1262 | self._addrawrevisionfn( |
|
1263 | self._addrawrevisionfn( | |
1263 | f, tr, node1, node0, nullid, 1, stored1, censored=True |
|
1264 | f, tr, node1, node0, f.nullid, 1, stored1, censored=True | |
1264 | ) |
|
1265 | ) | |
1265 |
|
1266 | |||
1266 | delta = mdiff.textdiff(b'bar\n' * 30, (b'bar\n' * 30) + b'baz\n') |
|
1267 | delta = mdiff.textdiff(b'bar\n' * 30, (b'bar\n' * 30) + b'baz\n') | |
1267 | deltas = [ |
|
1268 | deltas = [ | |
1268 | (b'\xcc' * 20, node1, nullid, b'\x01' * 20, node1, delta, 0, {}) |
|
1269 | (b'\xcc' * 20, node1, f.nullid, b'\x01' * 20, node1, delta, 0, {}) | |
1269 | ] |
|
1270 | ] | |
1270 |
|
1271 | |||
1271 | with self._maketransactionfn() as tr: |
|
1272 | with self._maketransactionfn() as tr: | |
@@ -1276,9 +1277,9 b' class ifilemutationtests(basetestcase):' | |||||
1276 | f = self._makefilefn() |
|
1277 | f = self._makefilefn() | |
1277 |
|
1278 | |||
1278 | with self._maketransactionfn() as tr: |
|
1279 | with self._maketransactionfn() as tr: | |
1279 | node0 = f.add(b'foo\n' * 30, None, tr, 0, nullid, nullid) |
|
1280 | node0 = f.add(b'foo\n' * 30, None, tr, 0, f.nullid, f.nullid) | |
1280 | node1 = f.add(b'foo\n' * 31, None, tr, 1, node0, nullid) |
|
1281 | node1 = f.add(b'foo\n' * 31, None, tr, 1, node0, f.nullid) | |
1281 | node2 = f.add(b'foo\n' * 32, None, tr, 2, node1, nullid) |
|
1282 | node2 = f.add(b'foo\n' * 32, None, tr, 2, node1, f.nullid) | |
1282 |
|
1283 | |||
1283 | with self._maketransactionfn() as tr: |
|
1284 | with self._maketransactionfn() as tr: | |
1284 | f.censorrevision(tr, node1) |
|
1285 | f.censorrevision(tr, node1) | |
@@ -1298,7 +1299,7 b' class ifilemutationtests(basetestcase):' | |||||
1298 |
|
1299 | |||
1299 | with self._maketransactionfn() as tr: |
|
1300 | with self._maketransactionfn() as tr: | |
1300 | for rev in range(10): |
|
1301 | for rev in range(10): | |
1301 | f.add(b'%d' % rev, None, tr, rev, nullid, nullid) |
|
1302 | f.add(b'%d' % rev, None, tr, rev, f.nullid, f.nullid) | |
1302 |
|
1303 | |||
1303 | for rev in range(10): |
|
1304 | for rev in range(10): | |
1304 | self.assertEqual(f.getstrippoint(rev), (rev, set())) |
|
1305 | self.assertEqual(f.getstrippoint(rev), (rev, set())) | |
@@ -1308,10 +1309,10 b' class ifilemutationtests(basetestcase):' | |||||
1308 | f = self._makefilefn() |
|
1309 | f = self._makefilefn() | |
1309 |
|
1310 | |||
1310 | with self._maketransactionfn() as tr: |
|
1311 | with self._maketransactionfn() as tr: | |
1311 | p1 = nullid |
|
1312 | p1 = f.nullid | |
1312 |
|
1313 | |||
1313 | for rev in range(10): |
|
1314 | for rev in range(10): | |
1314 | f.add(b'%d' % rev, None, tr, rev, p1, nullid) |
|
1315 | f.add(b'%d' % rev, None, tr, rev, p1, f.nullid) | |
1315 |
|
1316 | |||
1316 | for rev in range(10): |
|
1317 | for rev in range(10): | |
1317 | self.assertEqual(f.getstrippoint(rev), (rev, set())) |
|
1318 | self.assertEqual(f.getstrippoint(rev), (rev, set())) | |
@@ -1320,11 +1321,11 b' class ifilemutationtests(basetestcase):' | |||||
1320 | f = self._makefilefn() |
|
1321 | f = self._makefilefn() | |
1321 |
|
1322 | |||
1322 | with self._maketransactionfn() as tr: |
|
1323 | with self._maketransactionfn() as tr: | |
1323 | node0 = f.add(b'0', None, tr, 0, nullid, nullid) |
|
1324 | node0 = f.add(b'0', None, tr, 0, f.nullid, f.nullid) | |
1324 | node1 = f.add(b'1', None, tr, 1, node0, nullid) |
|
1325 | node1 = f.add(b'1', None, tr, 1, node0, f.nullid) | |
1325 | f.add(b'2', None, tr, 2, node1, nullid) |
|
1326 | f.add(b'2', None, tr, 2, node1, f.nullid) | |
1326 | f.add(b'3', None, tr, 3, node0, nullid) |
|
1327 | f.add(b'3', None, tr, 3, node0, f.nullid) | |
1327 | f.add(b'4', None, tr, 4, node0, nullid) |
|
1328 | f.add(b'4', None, tr, 4, node0, f.nullid) | |
1328 |
|
1329 | |||
1329 | for rev in range(5): |
|
1330 | for rev in range(5): | |
1330 | self.assertEqual(f.getstrippoint(rev), (rev, set())) |
|
1331 | self.assertEqual(f.getstrippoint(rev), (rev, set())) | |
@@ -1333,9 +1334,9 b' class ifilemutationtests(basetestcase):' | |||||
1333 | f = self._makefilefn() |
|
1334 | f = self._makefilefn() | |
1334 |
|
1335 | |||
1335 | with self._maketransactionfn() as tr: |
|
1336 | with self._maketransactionfn() as tr: | |
1336 | node0 = f.add(b'0', None, tr, 0, nullid, nullid) |
|
1337 | node0 = f.add(b'0', None, tr, 0, f.nullid, f.nullid) | |
1337 | f.add(b'1', None, tr, 10, node0, nullid) |
|
1338 | f.add(b'1', None, tr, 10, node0, f.nullid) | |
1338 | f.add(b'2', None, tr, 5, node0, nullid) |
|
1339 | f.add(b'2', None, tr, 5, node0, f.nullid) | |
1339 |
|
1340 | |||
1340 | self.assertEqual(f.getstrippoint(0), (0, set())) |
|
1341 | self.assertEqual(f.getstrippoint(0), (0, set())) | |
1341 | self.assertEqual(f.getstrippoint(1), (1, set())) |
|
1342 | self.assertEqual(f.getstrippoint(1), (1, set())) | |
@@ -1362,9 +1363,9 b' class ifilemutationtests(basetestcase):' | |||||
1362 | f = self._makefilefn() |
|
1363 | f = self._makefilefn() | |
1363 |
|
1364 | |||
1364 | with self._maketransactionfn() as tr: |
|
1365 | with self._maketransactionfn() as tr: | |
1365 | p1 = nullid |
|
1366 | p1 = f.nullid | |
1366 | for rev in range(10): |
|
1367 | for rev in range(10): | |
1367 | p1 = f.add(b'%d' % rev, None, tr, rev, p1, nullid) |
|
1368 | p1 = f.add(b'%d' % rev, None, tr, rev, p1, f.nullid) | |
1368 |
|
1369 | |||
1369 | self.assertEqual(len(f), 10) |
|
1370 | self.assertEqual(len(f), 10) | |
1370 |
|
1371 | |||
@@ -1377,9 +1378,9 b' class ifilemutationtests(basetestcase):' | |||||
1377 | f = self._makefilefn() |
|
1378 | f = self._makefilefn() | |
1378 |
|
1379 | |||
1379 | with self._maketransactionfn() as tr: |
|
1380 | with self._maketransactionfn() as tr: | |
1380 | f.add(b'0', None, tr, 0, nullid, nullid) |
|
1381 | f.add(b'0', None, tr, 0, f.nullid, f.nullid) | |
1381 | node1 = f.add(b'1', None, tr, 5, nullid, nullid) |
|
1382 | node1 = f.add(b'1', None, tr, 5, f.nullid, f.nullid) | |
1382 | node2 = f.add(b'2', None, tr, 10, nullid, nullid) |
|
1383 | node2 = f.add(b'2', None, tr, 10, f.nullid, f.nullid) | |
1383 |
|
1384 | |||
1384 | self.assertEqual(len(f), 3) |
|
1385 | self.assertEqual(len(f), 3) | |
1385 |
|
1386 | |||
@@ -10,10 +10,7 b' from __future__ import absolute_import' | |||||
10 | import collections |
|
10 | import collections | |
11 |
|
11 | |||
12 | from .i18n import _ |
|
12 | from .i18n import _ | |
13 | from .node import ( |
|
13 | from .node import short | |
14 | nullid, |
|
|||
15 | short, |
|
|||
16 | ) |
|
|||
17 | from . import ( |
|
14 | from . import ( | |
18 | error, |
|
15 | error, | |
19 | pycompat, |
|
16 | pycompat, | |
@@ -44,11 +41,11 b' def findcommonincoming(repo, remote, hea' | |||||
44 | if audit is not None: |
|
41 | if audit is not None: | |
45 | audit[b'total-roundtrips'] = 1 |
|
42 | audit[b'total-roundtrips'] = 1 | |
46 |
|
43 | |||
47 | if repo.changelog.tip() == nullid: |
|
44 | if repo.changelog.tip() == repo.nullid: | |
48 | base.add(nullid) |
|
45 | base.add(repo.nullid) | |
49 | if heads != [nullid]: |
|
46 | if heads != [repo.nullid]: | |
50 | return [nullid], [nullid], list(heads) |
|
47 | return [repo.nullid], [repo.nullid], list(heads) | |
51 | return [nullid], [], heads |
|
48 | return [repo.nullid], [], heads | |
52 |
|
49 | |||
53 | # assume we're closer to the tip than the root |
|
50 | # assume we're closer to the tip than the root | |
54 | # and start by examining the heads |
|
51 | # and start by examining the heads | |
@@ -84,7 +81,7 b' def findcommonincoming(repo, remote, hea' | |||||
84 | continue |
|
81 | continue | |
85 |
|
82 | |||
86 | repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1]))) |
|
83 | repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1]))) | |
87 | if n[0] == nullid: # found the end of the branch |
|
84 | if n[0] == repo.nullid: # found the end of the branch | |
88 | pass |
|
85 | pass | |
89 | elif n in seenbranch: |
|
86 | elif n in seenbranch: | |
90 | repo.ui.debug(b"branch already found\n") |
|
87 | repo.ui.debug(b"branch already found\n") | |
@@ -170,7 +167,7 b' def findcommonincoming(repo, remote, hea' | |||||
170 | raise error.RepoError(_(b"already have changeset ") + short(f[:4])) |
|
167 | raise error.RepoError(_(b"already have changeset ") + short(f[:4])) | |
171 |
|
168 | |||
172 | base = list(base) |
|
169 | base = list(base) | |
173 | if base == [nullid]: |
|
170 | if base == [repo.nullid]: | |
174 | if force: |
|
171 | if force: | |
175 | repo.ui.warn(_(b"warning: repository is unrelated\n")) |
|
172 | repo.ui.warn(_(b"warning: repository is unrelated\n")) | |
176 | else: |
|
173 | else: |
@@ -34,6 +34,7 b' import time' | |||||
34 | import traceback |
|
34 | import traceback | |
35 | import warnings |
|
35 | import warnings | |
36 |
|
36 | |||
|
37 | from .node import hex | |||
37 | from .thirdparty import attr |
|
38 | from .thirdparty import attr | |
38 | from .pycompat import ( |
|
39 | from .pycompat import ( | |
39 | delattr, |
|
40 | delattr, |
@@ -13,8 +13,8 b' import struct' | |||||
13 | from ..i18n import _ |
|
13 | from ..i18n import _ | |
14 | from ..node import ( |
|
14 | from ..node import ( | |
15 | bin, |
|
15 | bin, | |
16 | nullid, |
|
|||
17 | nullrev, |
|
16 | nullrev, | |
|
17 | sha1nodeconstants, | |||
18 | ) |
|
18 | ) | |
19 | from .. import ( |
|
19 | from .. import ( | |
20 | dagop, |
|
20 | dagop, | |
@@ -26,7 +26,7 b' from ..interfaces import repository' | |||||
26 | from ..revlogutils import sidedata as sidedatamod |
|
26 | from ..revlogutils import sidedata as sidedatamod | |
27 | from ..utils import hashutil |
|
27 | from ..utils import hashutil | |
28 |
|
28 | |||
29 | _nullhash = hashutil.sha1(nullid) |
|
29 | _nullhash = hashutil.sha1(sha1nodeconstants.nullid) | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | def hashrevisionsha1(text, p1, p2): |
|
32 | def hashrevisionsha1(text, p1, p2): | |
@@ -37,7 +37,7 b' def hashrevisionsha1(text, p1, p2):' | |||||
37 | content in the revision graph. |
|
37 | content in the revision graph. | |
38 | """ |
|
38 | """ | |
39 | # As of now, if one of the parent node is null, p2 is null |
|
39 | # As of now, if one of the parent node is null, p2 is null | |
40 | if p2 == nullid: |
|
40 | if p2 == sha1nodeconstants.nullid: | |
41 | # deep copy of a hash is faster than creating one |
|
41 | # deep copy of a hash is faster than creating one | |
42 | s = _nullhash.copy() |
|
42 | s = _nullhash.copy() | |
43 | s.update(p1) |
|
43 | s.update(p1) | |
@@ -107,7 +107,7 b' def filerevisioncopied(store, node):' | |||||
107 | Returns ``False`` if the file has no copy metadata. Otherwise a |
|
107 | Returns ``False`` if the file has no copy metadata. Otherwise a | |
108 | 2-tuple of the source filename and node. |
|
108 | 2-tuple of the source filename and node. | |
109 | """ |
|
109 | """ | |
110 | if store.parents(node)[0] != nullid: |
|
110 | if store.parents(node)[0] != sha1nodeconstants.nullid: | |
111 | return False |
|
111 | return False | |
112 |
|
112 | |||
113 | meta = parsemeta(store.revision(node))[0] |
|
113 | meta = parsemeta(store.revision(node))[0] |
@@ -10,13 +10,8 b' from __future__ import absolute_import' | |||||
10 | import os |
|
10 | import os | |
11 |
|
11 | |||
12 | from .i18n import _ |
|
12 | from .i18n import _ | |
13 | from .node import ( |
|
13 | from .node import short | |
14 | nullid, |
|
14 | from .utils import stringutil | |
15 | short, |
|
|||
16 | ) |
|
|||
17 | from .utils import ( |
|
|||
18 | stringutil, |
|
|||
19 | ) |
|
|||
20 |
|
15 | |||
21 | from . import ( |
|
16 | from . import ( | |
22 | error, |
|
17 | error, | |
@@ -159,13 +154,13 b' class verifier(object):' | |||||
159 |
|
154 | |||
160 | try: |
|
155 | try: | |
161 | p1, p2 = obj.parents(node) |
|
156 | p1, p2 = obj.parents(node) | |
162 | if p1 not in seen and p1 != nullid: |
|
157 | if p1 not in seen and p1 != self.repo.nullid: | |
163 | self._err( |
|
158 | self._err( | |
164 | lr, |
|
159 | lr, | |
165 | _(b"unknown parent 1 %s of %s") % (short(p1), short(node)), |
|
160 | _(b"unknown parent 1 %s of %s") % (short(p1), short(node)), | |
166 | f, |
|
161 | f, | |
167 | ) |
|
162 | ) | |
168 | if p2 not in seen and p2 != nullid: |
|
163 | if p2 not in seen and p2 != self.repo.nullid: | |
169 | self._err( |
|
164 | self._err( | |
170 | lr, |
|
165 | lr, | |
171 | _(b"unknown parent 2 %s of %s") % (short(p2), short(node)), |
|
166 | _(b"unknown parent 2 %s of %s") % (short(p2), short(node)), | |
@@ -267,7 +262,7 b' class verifier(object):' | |||||
267 |
|
262 | |||
268 | try: |
|
263 | try: | |
269 | changes = cl.read(n) |
|
264 | changes = cl.read(n) | |
270 | if changes[0] != nullid: |
|
265 | if changes[0] != self.repo.nullid: | |
271 | mflinkrevs.setdefault(changes[0], []).append(i) |
|
266 | mflinkrevs.setdefault(changes[0], []).append(i) | |
272 | self.refersmf = True |
|
267 | self.refersmf = True | |
273 | for f in changes[3]: |
|
268 | for f in changes[3]: | |
@@ -598,7 +593,7 b' class verifier(object):' | |||||
598 | % (rp[0], short(rp[1])), |
|
593 | % (rp[0], short(rp[1])), | |
599 | f, |
|
594 | f, | |
600 | ) |
|
595 | ) | |
601 | elif rp[1] == nullid: |
|
596 | elif rp[1] == self.repo.nullid: | |
602 | ui.note( |
|
597 | ui.note( | |
603 | _( |
|
598 | _( | |
604 | b"warning: %s@%s: copy source" |
|
599 | b"warning: %s@%s: copy source" |
@@ -11,10 +11,7 b' import binascii' | |||||
11 | import os |
|
11 | import os | |
12 |
|
12 | |||
13 | from .i18n import _ |
|
13 | from .i18n import _ | |
14 | from .node import ( |
|
14 | from .node import hex | |
15 | hex, |
|
|||
16 | nullid, |
|
|||
17 | ) |
|
|||
18 | from .pycompat import getattr |
|
15 | from .pycompat import getattr | |
19 |
|
16 | |||
20 | from . import ( |
|
17 | from . import ( | |
@@ -470,7 +467,7 b' def getbundle(repo, proto, others):' | |||||
470 | clheads = set(repo.changelog.heads()) |
|
467 | clheads = set(repo.changelog.heads()) | |
471 | heads = set(opts.get(b'heads', set())) |
|
468 | heads = set(opts.get(b'heads', set())) | |
472 | common = set(opts.get(b'common', set())) |
|
469 | common = set(opts.get(b'common', set())) | |
473 | common.discard(nullid) |
|
470 | common.discard(repo.nullid) | |
474 | if ( |
|
471 | if ( | |
475 | repo.ui.configbool(b'server', b'pullbundle') |
|
472 | repo.ui.configbool(b'server', b'pullbundle') | |
476 | and b'partial-pull' in proto.getprotocaps() |
|
473 | and b'partial-pull' in proto.getprotocaps() |
@@ -10,10 +10,7 b' import collections' | |||||
10 | import contextlib |
|
10 | import contextlib | |
11 |
|
11 | |||
12 | from .i18n import _ |
|
12 | from .i18n import _ | |
13 | from .node import ( |
|
13 | from .node import hex | |
14 | hex, |
|
|||
15 | nullid, |
|
|||
16 | ) |
|
|||
17 | from . import ( |
|
14 | from . import ( | |
18 | discovery, |
|
15 | discovery, | |
19 | encoding, |
|
16 | encoding, | |
@@ -950,7 +947,7 b' def resolvenodes(repo, revisions):' | |||||
950 | if spec[b'roots']: |
|
947 | if spec[b'roots']: | |
951 | common = [n for n in spec[b'roots'] if clhasnode(n)] |
|
948 | common = [n for n in spec[b'roots'] if clhasnode(n)] | |
952 | else: |
|
949 | else: | |
953 | common = [nullid] |
|
950 | common = [repo.nullid] | |
954 |
|
951 | |||
955 | for n in discovery.outgoing(repo, common, spec[b'heads']).missing: |
|
952 | for n in discovery.outgoing(repo, common, spec[b'heads']).missing: | |
956 | if n not in seen: |
|
953 | if n not in seen: |
@@ -86,7 +86,6 b' import collections' | |||||
86 | import itertools |
|
86 | import itertools | |
87 | import re |
|
87 | import re | |
88 |
|
88 | |||
89 | from mercurial.node import nullid |
|
|||
90 | from mercurial.i18n import _ |
|
89 | from mercurial.i18n import _ | |
91 | from mercurial import ( |
|
90 | from mercurial import ( | |
92 | context, |
|
91 | context, | |
@@ -299,7 +298,7 b' class simplecommitctx(context.committabl' | |||||
299 | self._added = added |
|
298 | self._added = added | |
300 | self._parents = parentctxs |
|
299 | self._parents = parentctxs | |
301 | while len(self._parents) < 2: |
|
300 | while len(self._parents) < 2: | |
302 | self._parents.append(repo[nullid]) |
|
301 | self._parents.append(repo[repo.nullid]) | |
303 |
|
302 | |||
304 | def filectx(self, key): |
|
303 | def filectx(self, key): | |
305 | return simplefilectx(key, self._added[key]) |
|
304 | return simplefilectx(key, self._added[key]) | |
@@ -388,7 +387,7 b' def debugdrawdag(ui, repo, **opts):' | |||||
388 | content = content.replace(br'\n', b'\n').replace(br'\1', b'\1') |
|
387 | content = content.replace(br'\n', b'\n').replace(br'\1', b'\1') | |
389 | files[name][path] = content |
|
388 | files[name][path] = content | |
390 |
|
389 | |||
391 | committed = {None: nullid} # {name: node} |
|
390 | committed = {None: repo.nullid} # {name: node} | |
392 |
|
391 | |||
393 | # for leaf nodes, try to find existing nodes in repo |
|
392 | # for leaf nodes, try to find existing nodes in repo | |
394 | for name, parents in edges.items(): |
|
393 | for name, parents in edges.items(): |
@@ -18,7 +18,6 b' from mercurial.i18n import _' | |||||
18 | from mercurial.node import ( |
|
18 | from mercurial.node import ( | |
19 | bin, |
|
19 | bin, | |
20 | hex, |
|
20 | hex, | |
21 | nullid, |
|
|||
22 | nullrev, |
|
21 | nullrev, | |
23 | ) |
|
22 | ) | |
24 | from mercurial.thirdparty import attr |
|
23 | from mercurial.thirdparty import attr | |
@@ -136,18 +135,18 b' class filestorage(object):' | |||||
136 | self._indexbynode[entry[b'node']] = entry |
|
135 | self._indexbynode[entry[b'node']] = entry | |
137 | self._indexbyrev[i] = entry |
|
136 | self._indexbyrev[i] = entry | |
138 |
|
137 | |||
139 | self._indexbynode[nullid] = { |
|
138 | self._indexbynode[self._repo.nullid] = { | |
140 | b'node': nullid, |
|
139 | b'node': self._repo.nullid, | |
141 | b'p1': nullid, |
|
140 | b'p1': self._repo.nullid, | |
142 | b'p2': nullid, |
|
141 | b'p2': self._repo.nullid, | |
143 | b'linkrev': nullrev, |
|
142 | b'linkrev': nullrev, | |
144 | b'flags': 0, |
|
143 | b'flags': 0, | |
145 | } |
|
144 | } | |
146 |
|
145 | |||
147 | self._indexbyrev[nullrev] = { |
|
146 | self._indexbyrev[nullrev] = { | |
148 | b'node': nullid, |
|
147 | b'node': self._repo.nullid, | |
149 | b'p1': nullid, |
|
148 | b'p1': self._repo.nullid, | |
150 | b'p2': nullid, |
|
149 | b'p2': self._repo.nullid, | |
151 | b'linkrev': nullrev, |
|
150 | b'linkrev': nullrev, | |
152 | b'flags': 0, |
|
151 | b'flags': 0, | |
153 | } |
|
152 | } | |
@@ -160,7 +159,7 b' class filestorage(object):' | |||||
160 | (0, 0, 0, -1, entry[b'linkrev'], p1rev, p2rev, entry[b'node']) |
|
159 | (0, 0, 0, -1, entry[b'linkrev'], p1rev, p2rev, entry[b'node']) | |
161 | ) |
|
160 | ) | |
162 |
|
161 | |||
163 | self._index.append((0, 0, 0, -1, -1, -1, -1, nullid)) |
|
162 | self._index.append((0, 0, 0, -1, -1, -1, -1, self._repo.nullid)) | |
164 |
|
163 | |||
165 | def __len__(self): |
|
164 | def __len__(self): | |
166 | return len(self._indexdata) |
|
165 | return len(self._indexdata) | |
@@ -288,7 +287,7 b' class filestorage(object):' | |||||
288 | node = nodeorrev |
|
287 | node = nodeorrev | |
289 | validatenode(node) |
|
288 | validatenode(node) | |
290 |
|
289 | |||
291 | if node == nullid: |
|
290 | if node == self._repo.nullid: | |
292 | return b'' |
|
291 | return b'' | |
293 |
|
292 | |||
294 | rev = self.rev(node) |
|
293 | rev = self.rev(node) | |
@@ -325,7 +324,7 b' class filestorage(object):' | |||||
325 | def renamed(self, node): |
|
324 | def renamed(self, node): | |
326 | validatenode(node) |
|
325 | validatenode(node) | |
327 |
|
326 | |||
328 | if self.parents(node)[0] != nullid: |
|
327 | if self.parents(node)[0] != self._repo.nullid: | |
329 | return False |
|
328 | return False | |
330 |
|
329 | |||
331 | fulltext = self.revision(node) |
|
330 | fulltext = self.revision(node) | |
@@ -451,7 +450,7 b' class filestorage(object):' | |||||
451 | sidedata_helpers=None, |
|
450 | sidedata_helpers=None, | |
452 | ): |
|
451 | ): | |
453 | # TODO this will probably break on some ordering options. |
|
452 | # TODO this will probably break on some ordering options. | |
454 | nodes = [n for n in nodes if n != nullid] |
|
453 | nodes = [n for n in nodes if n != self._repo.nullid] | |
455 | if not nodes: |
|
454 | if not nodes: | |
456 | return |
|
455 | return | |
457 | for delta in storageutil.emitrevisions( |
|
456 | for delta in storageutil.emitrevisions( | |
@@ -559,7 +558,7 b' class filestorage(object):' | |||||
559 | continue |
|
558 | continue | |
560 |
|
559 | |||
561 | # Need to resolve the fulltext from the delta base. |
|
560 | # Need to resolve the fulltext from the delta base. | |
562 | if deltabase == nullid: |
|
561 | if deltabase == self._repo.nullid: | |
563 | text = mdiff.patch(b'', delta) |
|
562 | text = mdiff.patch(b'', delta) | |
564 | else: |
|
563 | else: | |
565 | text = mdiff.patch(self.revision(deltabase), delta) |
|
564 | text = mdiff.patch(self.revision(deltabase), delta) | |
@@ -588,11 +587,11 b' class filestorage(object):' | |||||
588 | # This is copied from revlog.py. |
|
587 | # This is copied from revlog.py. | |
589 | if start is None and stop is None: |
|
588 | if start is None and stop is None: | |
590 | if not len(self): |
|
589 | if not len(self): | |
591 | return [nullid] |
|
590 | return [self._repo.nullid] | |
592 | return [self.node(r) for r in self._headrevs()] |
|
591 | return [self.node(r) for r in self._headrevs()] | |
593 |
|
592 | |||
594 | if start is None: |
|
593 | if start is None: | |
595 | start = nullid |
|
594 | start = self._repo.nullid | |
596 | if stop is None: |
|
595 | if stop is None: | |
597 | stop = [] |
|
596 | stop = [] | |
598 | stoprevs = {self.rev(n) for n in stop} |
|
597 | stoprevs = {self.rev(n) for n in stop} |
@@ -479,19 +479,19 b' and its ancestor by overriding "repo._fi' | |||||
479 |
|
479 | |||
480 | $ cat > ../legacyrepo.py <<EOF |
|
480 | $ cat > ../legacyrepo.py <<EOF | |
481 | > from __future__ import absolute_import |
|
481 | > from __future__ import absolute_import | |
482 | > from mercurial import commit, error, extensions, node |
|
482 | > from mercurial import commit, error, extensions | |
483 | > def _filecommit(orig, repo, fctx, manifest1, manifest2, |
|
483 | > def _filecommit(orig, repo, fctx, manifest1, manifest2, | |
484 | > linkrev, tr, includecopymeta, ms): |
|
484 | > linkrev, tr, includecopymeta, ms): | |
485 | > fname = fctx.path() |
|
485 | > fname = fctx.path() | |
486 | > text = fctx.data() |
|
486 | > text = fctx.data() | |
487 | > flog = repo.file(fname) |
|
487 | > flog = repo.file(fname) | |
488 | > fparent1 = manifest1.get(fname, node.nullid) |
|
488 | > fparent1 = manifest1.get(fname, repo.nullid) | |
489 | > fparent2 = manifest2.get(fname, node.nullid) |
|
489 | > fparent2 = manifest2.get(fname, repo.nullid) | |
490 | > meta = {} |
|
490 | > meta = {} | |
491 | > copy = fctx.copysource() |
|
491 | > copy = fctx.copysource() | |
492 | > if copy and copy != fname: |
|
492 | > if copy and copy != fname: | |
493 | > raise error.Abort('copying is not supported') |
|
493 | > raise error.Abort('copying is not supported') | |
494 | > if fparent2 != node.nullid: |
|
494 | > if fparent2 != repo.nullid: | |
495 | > return flog.add(text, meta, tr, linkrev, |
|
495 | > return flog.add(text, meta, tr, linkrev, | |
496 | > fparent1, fparent2), 'modified' |
|
496 | > fparent1, fparent2), 'modified' | |
497 | > raise error.Abort('only merging is supported') |
|
497 | > raise error.Abort('only merging is supported') |
@@ -646,14 +646,14 b' Test making empty commits' | |||||
646 | verify pathauditor blocks evil filepaths |
|
646 | verify pathauditor blocks evil filepaths | |
647 | $ cat > evil-commit.py <<EOF |
|
647 | $ cat > evil-commit.py <<EOF | |
648 | > from __future__ import absolute_import |
|
648 | > from __future__ import absolute_import | |
649 | > from mercurial import context, hg, node, ui as uimod |
|
649 | > from mercurial import context, hg, ui as uimod | |
650 | > notrc = u".h\u200cg".encode('utf-8') + b'/hgrc' |
|
650 | > notrc = u".h\u200cg".encode('utf-8') + b'/hgrc' | |
651 | > u = uimod.ui.load() |
|
651 | > u = uimod.ui.load() | |
652 | > r = hg.repository(u, b'.') |
|
652 | > r = hg.repository(u, b'.') | |
653 | > def filectxfn(repo, memctx, path): |
|
653 | > def filectxfn(repo, memctx, path): | |
654 | > return context.memfilectx(repo, memctx, path, |
|
654 | > return context.memfilectx(repo, memctx, path, | |
655 | > b'[hooks]\nupdate = echo owned') |
|
655 | > b'[hooks]\nupdate = echo owned') | |
656 | > c = context.memctx(r, [r.changelog.tip(), node.nullid], |
|
656 | > c = context.memctx(r, [r.changelog.tip(), r.nullid], | |
657 | > b'evil', [notrc], filectxfn, 0) |
|
657 | > b'evil', [notrc], filectxfn, 0) | |
658 | > r.commitctx(c) |
|
658 | > r.commitctx(c) | |
659 | > EOF |
|
659 | > EOF | |
@@ -672,14 +672,14 b' verify pathauditor blocks evil filepaths' | |||||
672 | repository tip rolled back to revision 2 (undo commit) |
|
672 | repository tip rolled back to revision 2 (undo commit) | |
673 | $ cat > evil-commit.py <<EOF |
|
673 | $ cat > evil-commit.py <<EOF | |
674 | > from __future__ import absolute_import |
|
674 | > from __future__ import absolute_import | |
675 | > from mercurial import context, hg, node, ui as uimod |
|
675 | > from mercurial import context, hg, ui as uimod | |
676 | > notrc = b"HG~1/hgrc" |
|
676 | > notrc = b"HG~1/hgrc" | |
677 | > u = uimod.ui.load() |
|
677 | > u = uimod.ui.load() | |
678 | > r = hg.repository(u, b'.') |
|
678 | > r = hg.repository(u, b'.') | |
679 | > def filectxfn(repo, memctx, path): |
|
679 | > def filectxfn(repo, memctx, path): | |
680 | > return context.memfilectx(repo, memctx, path, |
|
680 | > return context.memfilectx(repo, memctx, path, | |
681 | > b'[hooks]\nupdate = echo owned') |
|
681 | > b'[hooks]\nupdate = echo owned') | |
682 | > c = context.memctx(r, [r[b'tip'].node(), node.nullid], |
|
682 | > c = context.memctx(r, [r[b'tip'].node(), r.nullid], | |
683 | > b'evil', [notrc], filectxfn, 0) |
|
683 | > b'evil', [notrc], filectxfn, 0) | |
684 | > r.commitctx(c) |
|
684 | > r.commitctx(c) | |
685 | > EOF |
|
685 | > EOF | |
@@ -692,14 +692,14 b' verify pathauditor blocks evil filepaths' | |||||
692 | repository tip rolled back to revision 2 (undo commit) |
|
692 | repository tip rolled back to revision 2 (undo commit) | |
693 | $ cat > evil-commit.py <<EOF |
|
693 | $ cat > evil-commit.py <<EOF | |
694 | > from __future__ import absolute_import |
|
694 | > from __future__ import absolute_import | |
695 | > from mercurial import context, hg, node, ui as uimod |
|
695 | > from mercurial import context, hg, ui as uimod | |
696 | > notrc = b"HG8B6C~2/hgrc" |
|
696 | > notrc = b"HG8B6C~2/hgrc" | |
697 | > u = uimod.ui.load() |
|
697 | > u = uimod.ui.load() | |
698 | > r = hg.repository(u, b'.') |
|
698 | > r = hg.repository(u, b'.') | |
699 | > def filectxfn(repo, memctx, path): |
|
699 | > def filectxfn(repo, memctx, path): | |
700 | > return context.memfilectx(repo, memctx, path, |
|
700 | > return context.memfilectx(repo, memctx, path, | |
701 | > b'[hooks]\nupdate = echo owned') |
|
701 | > b'[hooks]\nupdate = echo owned') | |
702 | > c = context.memctx(r, [r[b'tip'].node(), node.nullid], |
|
702 | > c = context.memctx(r, [r[b'tip'].node(), r.nullid], | |
703 | > b'evil', [notrc], filectxfn, 0) |
|
703 | > b'evil', [notrc], filectxfn, 0) | |
704 | > r.commitctx(c) |
|
704 | > r.commitctx(c) | |
705 | > EOF |
|
705 | > EOF |
@@ -482,19 +482,19 b' and its ancestor by overriding "repo._fi' | |||||
482 |
|
482 | |||
483 | $ cat > ../legacyrepo.py <<EOF |
|
483 | $ cat > ../legacyrepo.py <<EOF | |
484 | > from __future__ import absolute_import |
|
484 | > from __future__ import absolute_import | |
485 | > from mercurial import commit, error, extensions, node |
|
485 | > from mercurial import commit, error, extensions | |
486 | > def _filecommit(orig, repo, fctx, manifest1, manifest2, |
|
486 | > def _filecommit(orig, repo, fctx, manifest1, manifest2, | |
487 | > linkrev, tr, includecopymeta, ms): |
|
487 | > linkrev, tr, includecopymeta, ms): | |
488 | > fname = fctx.path() |
|
488 | > fname = fctx.path() | |
489 | > text = fctx.data() |
|
489 | > text = fctx.data() | |
490 | > flog = repo.file(fname) |
|
490 | > flog = repo.file(fname) | |
491 | > fparent1 = manifest1.get(fname, node.nullid) |
|
491 | > fparent1 = manifest1.get(fname, repo.nullid) | |
492 | > fparent2 = manifest2.get(fname, node.nullid) |
|
492 | > fparent2 = manifest2.get(fname, repo.nullid) | |
493 | > meta = {} |
|
493 | > meta = {} | |
494 | > copy = fctx.copysource() |
|
494 | > copy = fctx.copysource() | |
495 | > if copy and copy != fname: |
|
495 | > if copy and copy != fname: | |
496 | > raise error.Abort('copying is not supported') |
|
496 | > raise error.Abort('copying is not supported') | |
497 | > if fparent2 != node.nullid: |
|
497 | > if fparent2 != repo.nullid: | |
498 | > return flog.add(text, meta, tr, linkrev, |
|
498 | > return flog.add(text, meta, tr, linkrev, | |
499 | > fparent1, fparent2), 'modified' |
|
499 | > fparent1, fparent2), 'modified' | |
500 | > raise error.Abort('only merging is supported') |
|
500 | > raise error.Abort('only merging is supported') |
@@ -4,10 +4,7 b' Tests the behavior of filelog w.r.t. dat' | |||||
4 | """ |
|
4 | """ | |
5 | from __future__ import absolute_import, print_function |
|
5 | from __future__ import absolute_import, print_function | |
6 |
|
6 | |||
7 | from mercurial.node import ( |
|
7 | from mercurial.node import hex | |
8 | hex, |
|
|||
9 | nullid, |
|
|||
10 | ) |
|
|||
11 | from mercurial import ( |
|
8 | from mercurial import ( | |
12 | hg, |
|
9 | hg, | |
13 | ui as uimod, |
|
10 | ui as uimod, | |
@@ -22,7 +19,7 b" fl = repo.file(b'foobar')" | |||||
22 | def addrev(text, renamed=False): |
|
19 | def addrev(text, renamed=False): | |
23 | if renamed: |
|
20 | if renamed: | |
24 | # data doesn't matter. Just make sure filelog.renamed() returns True |
|
21 | # data doesn't matter. Just make sure filelog.renamed() returns True | |
25 | meta = {b'copyrev': hex(nullid), b'copy': b'bar'} |
|
22 | meta = {b'copyrev': hex(repo.nullid), b'copy': b'bar'} | |
26 | else: |
|
23 | else: | |
27 | meta = {} |
|
24 | meta = {} | |
28 |
|
25 | |||
@@ -30,7 +27,7 b' def addrev(text, renamed=False):' | |||||
30 | try: |
|
27 | try: | |
31 | lock = repo.lock() |
|
28 | lock = repo.lock() | |
32 | t = repo.transaction(b'commit') |
|
29 | t = repo.transaction(b'commit') | |
33 | node = fl.add(text, meta, t, 0, nullid, nullid) |
|
30 | node = fl.add(text, meta, t, 0, repo.nullid, repo.nullid) | |
34 | return node |
|
31 | return node | |
35 | finally: |
|
32 | finally: | |
36 | if t: |
|
33 | if t: |
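
At the filelog level the same migration looks like the addrev helper above: a root revision takes repo.nullid for both parents, and a synthetic rename marker hex-encodes that node. A condensed sketch, assuming a repo, its filelog fl, and an open transaction t are already in hand:

    from mercurial.node import hex

    def add_root_rev(repo, fl, t, text, renamed=False):
        # the rename marker only needs *a* valid node; the null node suffices
        meta = {b'copy': b'bar', b'copyrev': hex(repo.nullid)} if renamed else {}
        # linkrev 0 plus two null parents creates a root filelog revision
        return fl.add(text, meta, t, 0, repo.nullid, repo.nullid)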
@@ -14,8 +14,8 b' import unittest' | |||||
14 | from mercurial.node import ( |
|
14 | from mercurial.node import ( | |
15 | bin, |
|
15 | bin, | |
16 | hex, |
|
16 | hex, | |
17 | nullid, |
|
|||
18 | nullrev, |
|
17 | nullrev, | |
|
18 | sha1nodeconstants, | |||
19 | ) |
|
19 | ) | |
20 | from mercurial import ( |
|
20 | from mercurial import ( | |
21 | policy, |
|
21 | policy, | |
@@ -40,7 +40,7 b' def py_parseindex(data, inline):' | |||||
40 | s = 64 |
|
40 | s = 64 | |
41 | cache = None |
|
41 | cache = None | |
42 | index = [] |
|
42 | index = [] | |
43 | nodemap = {nullid: nullrev} |
|
43 | nodemap = {sha1nodeconstants.nullid: nullrev} | |
44 | n = off = 0 |
|
44 | n = off = 0 | |
45 |
|
45 | |||
46 | l = len(data) - s |
|
46 | l = len(data) - s | |
@@ -227,7 +227,7 b' class parseindex2tests(unittest.TestCase' | |||||
227 |
|
227 | |||
228 | ix = parsers.parse_index2(data_inlined, True)[0] |
|
228 | ix = parsers.parse_index2(data_inlined, True)[0] | |
229 | for i, r in enumerate(ix): |
|
229 | for i, r in enumerate(ix): | |
230 | if r[7] == nullid: |
|
230 | if r[7] == sha1nodeconstants.nullid: | |
231 | i = -1 |
|
231 | i = -1 | |
232 | try: |
|
232 | try: | |
233 | self.assertEqual( |
|
233 | self.assertEqual( | |
@@ -240,7 +240,7 b' class parseindex2tests(unittest.TestCase' | |||||
240 | break |
|
240 | break | |
241 |
|
241 | |||
242 | def testminusone(self): |
|
242 | def testminusone(self): | |
243 | want = (0, 0, 0, -1, -1, -1, -1, nullid) |
|
243 | want = (0, 0, 0, -1, -1, -1, -1, sha1nodeconstants.nullid) | |
244 | index, junk = parsers.parse_index2(data_inlined, True) |
|
244 | index, junk = parsers.parse_index2(data_inlined, True) | |
245 | got = index[-1] |
|
245 | got = index[-1] | |
246 | self.assertEqual(want, got) # inline data |
|
246 | self.assertEqual(want, got) # inline data |
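
These index tests swap the bare constants for the sha1nodeconstants bundle. For SHA-1 repositories the values themselves are unchanged, which the assertions below spell out (nullrev, by contrast, remains a plain module-level integer):

    from mercurial.node import hex, nullrev, sha1nodeconstants

    assert sha1nodeconstants.nullid == b'\x00' * 20  # 20-byte zero node
    assert sha1nodeconstants.nullhex == b'0' * 40    # its 40-char hex form
    assert hex(sha1nodeconstants.nullid) == sha1nodeconstants.nullhex
    assert nullrev == -1                             # unchanged by this series

    # the seed entry py_parseindex builds its nodemap around:
    nodemap = {sha1nodeconstants.nullid: nullrev}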
@@ -16,7 +16,7 b' import silenttestrunner' | |||||
16 |
|
16 | |||
17 | # Load the local remotefilelog, not the system one |
|
17 | # Load the local remotefilelog, not the system one | |
18 | sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')] |
|
18 | sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')] | |
19 | from mercurial.node import nullid |
|
19 | from mercurial.node import sha1nodeconstants | |
20 | from mercurial import policy |
|
20 | from mercurial import policy | |
21 |
|
21 | |||
22 | if not policy._packageprefs.get(policy.policy, (False, False))[1]: |
|
22 | if not policy._packageprefs.get(policy.policy, (False, False))[1]: | |
@@ -63,7 +63,14 b' class datapacktestsbase(object):' | |||||
63 |
|
63 | |||
64 | def createPack(self, revisions=None, packdir=None): |
|
64 | def createPack(self, revisions=None, packdir=None): | |
65 | if revisions is None: |
|
65 | if revisions is None: | |
66 | revisions = [(b"filename", self.getFakeHash(), nullid, b"content")] |
|
66 | revisions = [ | |
|
67 | ( | |||
|
68 | b"filename", | |||
|
69 | self.getFakeHash(), | |||
|
70 | sha1nodeconstants.nullid, | |||
|
71 | b"content", | |||
|
72 | ) | |||
|
73 | ] | |||
67 |
|
74 | |||
68 | if packdir is None: |
|
75 | if packdir is None: | |
69 | packdir = self.makeTempDir() |
|
76 | packdir = self.makeTempDir() | |
@@ -86,7 +93,7 b' class datapacktestsbase(object):' | |||||
86 | filename = b"foo" |
|
93 | filename = b"foo" | |
87 | node = self.getHash(content) |
|
94 | node = self.getHash(content) | |
88 |
|
95 | |||
89 | revisions = [(filename, node, nullid, content)] |
|
96 | revisions = [(filename, node, sha1nodeconstants.nullid, content)] | |
90 | pack = self.createPack(revisions) |
|
97 | pack = self.createPack(revisions) | |
91 | if self.paramsavailable: |
|
98 | if self.paramsavailable: | |
92 | self.assertEqual( |
|
99 | self.assertEqual( | |
@@ -126,7 +133,7 b' class datapacktestsbase(object):' | |||||
126 | """Test putting multiple delta blobs into a pack and read the chain.""" |
|
133 | """Test putting multiple delta blobs into a pack and read the chain.""" | |
127 | revisions = [] |
|
134 | revisions = [] | |
128 | filename = b"foo" |
|
135 | filename = b"foo" | |
129 | lastnode = nullid |
|
136 | lastnode = sha1nodeconstants.nullid | |
130 | for i in range(10): |
|
137 | for i in range(10): | |
131 | content = b"abcdef%d" % i |
|
138 | content = b"abcdef%d" % i | |
132 | node = self.getHash(content) |
|
139 | node = self.getHash(content) | |
@@ -157,7 +164,7 b' class datapacktestsbase(object):' | |||||
157 | for j in range(random.randint(1, 100)): |
|
164 | for j in range(random.randint(1, 100)): | |
158 | content = b"content-%d" % j |
|
165 | content = b"content-%d" % j | |
159 | node = self.getHash(content) |
|
166 | node = self.getHash(content) | |
160 | lastnode = nullid |
|
167 | lastnode = sha1nodeconstants.nullid | |
161 | if len(filerevs) > 0: |
|
168 | if len(filerevs) > 0: | |
162 | lastnode = filerevs[random.randint(0, len(filerevs) - 1)] |
|
169 | lastnode = filerevs[random.randint(0, len(filerevs) - 1)] | |
163 | filerevs.append(node) |
|
170 | filerevs.append(node) | |
@@ -185,7 +192,9 b' class datapacktestsbase(object):' | |||||
185 | b'Z': b'random_string', |
|
192 | b'Z': b'random_string', | |
186 | b'_': b'\0' * i, |
|
193 | b'_': b'\0' * i, | |
187 | } |
|
194 | } | |
188 | revisions.append((filename, node, nullid, content, meta)) |
|
195 | revisions.append( | |
|
196 | (filename, node, sha1nodeconstants.nullid, content, meta) | |||
|
197 | ) | |||
189 | pack = self.createPack(revisions) |
|
198 | pack = self.createPack(revisions) | |
190 | for name, node, x, content, origmeta in revisions: |
|
199 | for name, node, x, content, origmeta in revisions: | |
191 | parsedmeta = pack.getmeta(name, node) |
|
200 | parsedmeta = pack.getmeta(name, node) | |
@@ -198,7 +207,7 b' class datapacktestsbase(object):' | |||||
198 | """Test the getmissing() api.""" |
|
207 | """Test the getmissing() api.""" | |
199 | revisions = [] |
|
208 | revisions = [] | |
200 | filename = b"foo" |
|
209 | filename = b"foo" | |
201 | lastnode = nullid |
|
210 | lastnode = sha1nodeconstants.nullid | |
202 | for i in range(10): |
|
211 | for i in range(10): | |
203 | content = b"abcdef%d" % i |
|
212 | content = b"abcdef%d" % i | |
204 | node = self.getHash(content) |
|
213 | node = self.getHash(content) | |
@@ -225,7 +234,7 b' class datapacktestsbase(object):' | |||||
225 | pack = self.createPack() |
|
234 | pack = self.createPack() | |
226 |
|
235 | |||
227 | try: |
|
236 | try: | |
228 | pack.add(b'filename', nullid, b'contents') |
|
237 | pack.add(b'filename', sha1nodeconstants.nullid, b'contents') | |
229 | self.assertTrue(False, "datapack.add should throw") |
|
238 | self.assertTrue(False, "datapack.add should throw") | |
230 | except RuntimeError: |
|
239 | except RuntimeError: | |
231 | pass |
|
240 | pass | |
@@ -264,7 +273,9 b' class datapacktestsbase(object):' | |||||
264 | content = filename |
|
273 | content = filename | |
265 | node = self.getHash(content) |
|
274 | node = self.getHash(content) | |
266 | blobs[(filename, node)] = content |
|
275 | blobs[(filename, node)] = content | |
267 | revisions.append((filename, node, nullid, content)) |
|
276 | revisions.append( | |
|
277 | (filename, node, sha1nodeconstants.nullid, content) | |||
|
278 | ) | |||
268 |
|
279 | |||
269 | pack = self.createPack(revisions) |
|
280 | pack = self.createPack(revisions) | |
270 | if self.paramsavailable: |
|
281 | if self.paramsavailable: | |
@@ -288,7 +299,12 b' class datapacktestsbase(object):' | |||||
288 |
|
299 | |||
289 | for i in range(numpacks): |
|
300 | for i in range(numpacks): | |
290 | chain = [] |
|
301 | chain = [] | |
291 | revision = (b'%d' % i, self.getFakeHash(), nullid, b"content") |
|
302 | revision = ( | |
|
303 | b'%d' % i, | |||
|
304 | self.getFakeHash(), | |||
|
305 | sha1nodeconstants.nullid, | |||
|
306 | b"content", | |||
|
307 | ) | |||
292 |
|
308 | |||
293 | for _ in range(revisionsperpack): |
|
309 | for _ in range(revisionsperpack): | |
294 | chain.append(revision) |
|
310 | chain.append(revision) | |
@@ -346,7 +362,9 b' class datapacktestsbase(object):' | |||||
346 | filename = b"filename-%d" % i |
|
362 | filename = b"filename-%d" % i | |
347 | content = b"content-%d" % i |
|
363 | content = b"content-%d" % i | |
348 | node = self.getHash(content) |
|
364 | node = self.getHash(content) | |
349 | revisions.append((filename, node, nullid, content)) |
|
365 | revisions.append( | |
|
366 | (filename, node, sha1nodeconstants.nullid, content) | |||
|
367 | ) | |||
350 |
|
368 | |||
351 | path = self.createPack(revisions).path |
|
369 | path = self.createPack(revisions).path | |
352 |
|
370 |
@@ -13,7 +13,7 b' import unittest' | |||||
13 |
|
13 | |||
14 | import silenttestrunner |
|
14 | import silenttestrunner | |
15 |
|
15 | |||
16 | from mercurial.node import nullid |
|
16 | from mercurial.node import sha1nodeconstants | |
17 | from mercurial import ( |
|
17 | from mercurial import ( | |
18 | pycompat, |
|
18 | pycompat, | |
19 | ui as uimod, |
|
19 | ui as uimod, | |
@@ -59,8 +59,8 b' class histpacktests(unittest.TestCase):' | |||||
59 | ( |
|
59 | ( | |
60 | b"filename", |
|
60 | b"filename", | |
61 | self.getFakeHash(), |
|
61 | self.getFakeHash(), | |
62 | nullid, |
|
62 | sha1nodeconstants.nullid, | |
63 | nullid, |
|
63 | sha1nodeconstants.nullid, | |
64 | self.getFakeHash(), |
|
64 | self.getFakeHash(), | |
65 | None, |
|
65 | None, | |
66 | ) |
|
66 | ) | |
@@ -119,10 +119,19 b' class histpacktests(unittest.TestCase):' | |||||
119 | """ |
|
119 | """ | |
120 | revisions = [] |
|
120 | revisions = [] | |
121 | filename = b"foo" |
|
121 | filename = b"foo" | |
122 | lastnode = nullid |
|
122 | lastnode = sha1nodeconstants.nullid | |
123 | for i in range(10): |
|
123 | for i in range(10): | |
124 | node = self.getFakeHash() |
|
124 | node = self.getFakeHash() | |
125 | revisions.append((filename, node, lastnode, nullid, nullid, None)) |
|
125 | revisions.append( | |
|
126 | ( | |||
|
127 | filename, | |||
|
128 | node, | |||
|
129 | lastnode, | |||
|
130 | sha1nodeconstants.nullid, | |||
|
131 | sha1nodeconstants.nullid, | |||
|
132 | None, | |||
|
133 | ) | |||
|
134 | ) | |||
126 | lastnode = node |
|
135 | lastnode = node | |
127 |
|
136 | |||
128 | # revisions must be added in topological order, newest first |
|
137 | # revisions must be added in topological order, newest first | |
@@ -148,17 +157,17 b' class histpacktests(unittest.TestCase):' | |||||
148 | for i in range(100): |
|
157 | for i in range(100): | |
149 | filename = b"filename-%d" % i |
|
158 | filename = b"filename-%d" % i | |
150 | entries = [] |
|
159 | entries = [] | |
151 | p2 = nullid |
|
160 | p2 = sha1nodeconstants.nullid | |
152 | linknode = nullid |
|
161 | linknode = sha1nodeconstants.nullid | |
153 | for j in range(random.randint(1, 100)): |
|
162 | for j in range(random.randint(1, 100)): | |
154 | node = self.getFakeHash() |
|
163 | node = self.getFakeHash() | |
155 | p1 = nullid |
|
164 | p1 = sha1nodeconstants.nullid | |
156 | if len(entries) > 0: |
|
165 | if len(entries) > 0: | |
157 | p1 = entries[random.randint(0, len(entries) - 1)] |
|
166 | p1 = entries[random.randint(0, len(entries) - 1)] | |
158 | entries.append(node) |
|
167 | entries.append(node) | |
159 | revisions.append((filename, node, p1, p2, linknode, None)) |
|
168 | revisions.append((filename, node, p1, p2, linknode, None)) | |
160 | allentries[(filename, node)] = (p1, p2, linknode) |
|
169 | allentries[(filename, node)] = (p1, p2, linknode) | |
161 | if p1 == nullid: |
|
170 | if p1 == sha1nodeconstants.nullid: | |
162 | ancestorcounts[(filename, node)] = 1 |
|
171 | ancestorcounts[(filename, node)] = 1 | |
163 | else: |
|
172 | else: | |
164 | newcount = ancestorcounts[(filename, p1)] + 1 |
|
173 | newcount = ancestorcounts[(filename, p1)] + 1 | |
@@ -182,10 +191,19 b' class histpacktests(unittest.TestCase):' | |||||
182 | def testGetNodeInfo(self): |
|
191 | def testGetNodeInfo(self): | |
183 | revisions = [] |
|
192 | revisions = [] | |
184 | filename = b"foo" |
|
193 | filename = b"foo" | |
185 | lastnode = nullid |
|
194 | lastnode = sha1nodeconstants.nullid | |
186 | for i in range(10): |
|
195 | for i in range(10): | |
187 | node = self.getFakeHash() |
|
196 | node = self.getFakeHash() | |
188 | revisions.append((filename, node, lastnode, nullid, nullid, None)) |
|
197 | revisions.append( | |
|
198 | ( | |||
|
199 | filename, | |||
|
200 | node, | |||
|
201 | lastnode, | |||
|
202 | sha1nodeconstants.nullid, | |||
|
203 | sha1nodeconstants.nullid, | |||
|
204 | None, | |||
|
205 | ) | |||
|
206 | ) | |||
189 | lastnode = node |
|
207 | lastnode = node | |
190 |
|
208 | |||
191 | pack = self.createPack(revisions) |
|
209 | pack = self.createPack(revisions) | |
@@ -233,7 +251,14 b' class histpacktests(unittest.TestCase):' | |||||
233 | pack = self.createPack() |
|
251 | pack = self.createPack() | |
234 |
|
252 | |||
235 | try: |
|
253 | try: | |
236 | pack.add(b'filename', nullid, nullid, nullid, nullid, None) |
|
254 | pack.add( | |
|
255 | b'filename', | |||
|
256 | sha1nodeconstants.nullid, | |||
|
257 | sha1nodeconstants.nullid, | |||
|
258 | sha1nodeconstants.nullid, | |||
|
259 | sha1nodeconstants.nullid, | |||
|
260 | None, | |||
|
261 | ) | |||
237 | self.assertTrue(False, "historypack.add should throw") |
|
262 | self.assertTrue(False, "historypack.add should throw") | |
238 | except RuntimeError: |
|
263 | except RuntimeError: | |
239 | pass |
|
264 | pass |
@@ -6,7 +6,6 b' import collections' | |||||
6 | import hashlib |
|
6 | import hashlib | |
7 | import sys |
|
7 | import sys | |
8 |
|
8 | |||
9 | from mercurial.node import nullid |
|
|||
10 | from mercurial import ( |
|
9 | from mercurial import ( | |
11 | encoding, |
|
10 | encoding, | |
12 | revlog, |
|
11 | revlog, | |
@@ -93,7 +92,7 b' def appendrev(rlog, text, tr, isext=Fals' | |||||
93 | """ |
|
92 | """ | |
94 | nextrev = len(rlog) |
|
93 | nextrev = len(rlog) | |
95 | p1 = rlog.node(nextrev - 1) |
|
94 | p1 = rlog.node(nextrev - 1) | |
96 | p2 = nullid |
|
95 | p2 = rlog.nullid | |
97 | if isext: |
|
96 | if isext: | |
98 | flags = revlog.REVIDX_EXTSTORED |
|
97 | flags = revlog.REVIDX_EXTSTORED | |
99 | else: |
|
98 | else: | |
@@ -127,7 +126,7 b" def addgroupcopy(rlog, tr, destname=b'_d" | |||||
127 | class dummychangegroup(object): |
|
126 | class dummychangegroup(object): | |
128 | @staticmethod |
|
127 | @staticmethod | |
129 | def deltachunk(pnode): |
|
128 | def deltachunk(pnode): | |
130 | pnode = pnode or nullid |
|
129 | pnode = pnode or rlog.nullid | |
131 | parentrev = rlog.rev(pnode) |
|
130 | parentrev = rlog.rev(pnode) | |
132 | r = parentrev + 1 |
|
131 | r = parentrev + 1 | |
133 | if r >= len(rlog): |
|
132 | if r >= len(rlog): | |
@@ -142,7 +141,7 b" def addgroupcopy(rlog, tr, destname=b'_d" | |||||
142 | return { |
|
141 | return { | |
143 | b'node': rlog.node(r), |
|
142 | b'node': rlog.node(r), | |
144 | b'p1': pnode, |
|
143 | b'p1': pnode, | |
145 | b'p2': nullid, |
|
144 | b'p2': rlog.nullid, | |
146 | b'cs': rlog.node(rlog.linkrev(r)), |
|
145 | b'cs': rlog.node(rlog.linkrev(r)), | |
147 | b'flags': rlog.flags(r), |
|
146 | b'flags': rlog.flags(r), | |
148 | b'deltabase': rlog.node(deltaparent), |
|
147 | b'deltabase': rlog.node(deltaparent), | |
@@ -183,7 +182,7 b" def lowlevelcopy(rlog, tr, destname=b'_d" | |||||
183 | dlog = newrevlog(destname, recreate=True) |
|
182 | dlog = newrevlog(destname, recreate=True) | |
184 | for r in rlog: |
|
183 | for r in rlog: | |
185 | p1 = rlog.node(r - 1) |
|
184 | p1 = rlog.node(r - 1) | |
186 | p2 = nullid |
|
185 | p2 = rlog.nullid | |
187 | if r == 0 or (rlog.flags(r) & revlog.REVIDX_EXTSTORED): |
|
186 | if r == 0 or (rlog.flags(r) & revlog.REVIDX_EXTSTORED): | |
188 | text = rlog.rawdata(r) |
|
187 | text = rlog.rawdata(r) | |
189 | cachedelta = None |
|
188 | cachedelta = None |
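
In these revlog helpers the null parent is now read off the revlog instance (rlog.nullid), keeping the helpers hash-agnostic. A small sketch of the parent computation shared by appendrev and lowlevelcopy; rlog stands for any revlog-like object exposing node() and nullid:

    def linear_parents(rlog, rev):
        # for rev == 0, node(-1) resolves to the null entry, i.e. rlog.nullid
        p1 = rlog.node(rev - 1)
        p2 = rlog.nullid  # linear history never has a second parent
        return p1, p2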
@@ -10,10 +10,7 b' from __future__ import absolute_import' | |||||
10 | import hashlib |
|
10 | import hashlib | |
11 | import struct |
|
11 | import struct | |
12 |
|
12 | |||
13 | from mercurial.node import ( |
|
13 | from mercurial.node import nullrev | |
14 | nullid, |
|
|||
15 | nullrev, |
|
|||
16 | ) |
|
|||
17 | from mercurial import ( |
|
14 | from mercurial import ( | |
18 | extensions, |
|
15 | extensions, | |
19 | requirements, |
|
16 | requirements, | |
@@ -46,7 +43,7 b' def wrap_revisiondata(orig, self, nodeor' | |||||
46 | return text, sd |
|
43 | return text, sd | |
47 | if self.version & 0xFFFF != 2: |
|
44 | if self.version & 0xFFFF != 2: | |
48 | return text, sd |
|
45 | return text, sd | |
49 | if nodeorrev != nullrev and nodeorrev != nullid: |
|
46 | if nodeorrev != nullrev and nodeorrev != self.nullid: | |
50 | cat1 = sd.get(sidedata.SD_TEST1) |
|
47 | cat1 = sd.get(sidedata.SD_TEST1) | |
51 | if cat1 is not None and len(text) != struct.unpack('>I', cat1)[0]: |
|
48 | if cat1 is not None and len(text) != struct.unpack('>I', cat1)[0]: | |
52 | raise RuntimeError('text size mismatch') |
|
49 | raise RuntimeError('text size mismatch') |
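
Lastly, the sidedata test extension asks "is this the null revision?" for both forms an identifier can take, with the node form now coming from the revlog itself. A one-function sketch of that guard; revlog_obj is a stand-in name:

    from mercurial.node import nullrev

    def is_null(revlog_obj, nodeorrev):
        # nodeorrev may be an integer revision number or a binary node
        return nodeorrev == nullrev or nodeorrev == revlog_obj.nullid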