Abstract manifest block parsing.
Brendan Cully
r3196:f3b93944 default
manifest.py
@@ -1,199 +1,209 @@
 # manifest.py - manifest revision class for mercurial
 #
 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.

 from revlog import *
 from i18n import gettext as _
 from demandload import *
 demandload(globals(), "array bisect struct")
+demandload(globals(), "mdiff")

 class manifestdict(dict):
     def __init__(self, mapping=None, flags=None):
         if mapping is None: mapping = {}
         if flags is None: flags = {}
         dict.__init__(self, mapping)
         self._flags = flags
     def flags(self, f):
         return self._flags.get(f, "")
     def execf(self, f):
         "test for executable in manifest flags"
         return "x" in self.flags(f)
     def linkf(self, f):
         "test for symlink in manifest flags"
         return "l" in self.flags(f)
     def rawset(self, f, entry):
         self[f] = bin(entry[:40])
         fl = entry[40:-1]
         if fl: self._flags[f] = fl
     def set(self, f, execf=False, linkf=False):
         if linkf: self._flags[f] = "l"
         elif execf: self._flags[f] = "x"
         else: self._flags[f] = ""
     def copy(self):
         return manifestdict(dict.copy(self), dict.copy(self._flags))

 class manifest(revlog):
     def __init__(self, opener, defversion=REVLOGV0):
         self.mapcache = None
         self.listcache = None
         revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
                         defversion)

+    def parselines(self, lines):
+        for l in lines.splitlines(1):
+            yield l.split('\0')
+
+    def readdelta(self, node):
+        delta = mdiff.patchtext(self.delta(node))
+        deltamap = manifestdict()
+        for f, n in self.parselines(delta):
+            deltamap.rawset(f, n)
+        return deltamap
+
     def read(self, node):
         if node == nullid: return manifestdict() # don't upset local cache
         if self.mapcache and self.mapcache[0] == node:
             return self.mapcache[1]
         text = self.revision(node)
         self.listcache = array.array('c', text)
-        lines = text.splitlines(1)
         mapping = manifestdict()
-        for l in lines:
-            (f, n) = l.split('\0')
+        for f, n in self.parselines(text):
             mapping.rawset(f, n)
         self.mapcache = (node, mapping)
         return mapping

     def _search(self, m, s, lo=0, hi=None):
         '''return a tuple (start, end) that says where to find s within m.

         If the string is found m[start:end] are the line containing
         that string. If start == end the string was not found and
         they indicate the proper sorted insertion point. This was
         taken from bisect_left, and modified to find line start/end as
         it goes along.

         m should be a buffer or a string
         s is a string'''
         def advance(i, c):
             while i < lenm and m[i] != c:
                 i += 1
             return i
         lenm = len(m)
         if not hi:
             hi = lenm
         while lo < hi:
             mid = (lo + hi) // 2
             start = mid
             while start > 0 and m[start-1] != '\n':
                 start -= 1
             end = advance(start, '\0')
             if m[start:end] < s:
                 # we know that after the null there are 40 bytes of sha1
                 # this translates to the bisect lo = mid + 1
                 lo = advance(end + 40, '\n') + 1
             else:
                 # this translates to the bisect hi = mid
                 hi = start
         end = advance(lo, '\0')
         found = m[lo:end]
         if cmp(s, found) == 0:
             # we know that after the null there are 40 bytes of sha1
             end = advance(end + 40, '\n')
             return (lo, end+1)
         else:
             return (lo, lo)

     def find(self, node, f):
         '''look up entry for a single file efficiently.
         return (node, flag) pair if found, (None, None) if not.'''
         if self.mapcache and node == self.mapcache[0]:
             return self.mapcache[1].get(f), self.mapcache[1].flags(f)
         text = self.revision(node)
         start, end = self._search(text, f)
         if start == end:
             return None, None
         l = text[start:end]
         f, n = l.split('\0')
         return bin(n[:40]), n[40:-1] == 'x'

     def add(self, map, transaction, link, p1=None, p2=None,
             changed=None):
         # apply the changes collected during the bisect loop to our addlist
         # return a delta suitable for addrevision
         def addlistdelta(addlist, x):
             # start from the bottom up
             # so changes to the offsets don't mess things up.
             i = len(x)
             while i > 0:
                 i -= 1
                 start = x[i][0]
                 end = x[i][1]
                 if x[i][2]:
                     addlist[start:end] = array.array('c', x[i][2])
                 else:
                     del addlist[start:end]
             return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
                             for d in x ])

         # if we're using the listcache, make sure it is valid and
         # parented by the same node we're diffing against
         if not changed or not self.listcache or not p1 or \
                self.mapcache[0] != p1:
             files = map.keys()
             files.sort()

             # if this is changed to support newlines in filenames,
             # be sure to check the templates/ dir again (especially *-raw.tmpl)
             text = ["%s\000%s%s\n" % (f, hex(map[f]), map.flags(f)) for f in files]
             self.listcache = array.array('c', "".join(text))
             cachedelta = None
         else:
             addlist = self.listcache

             # combine the changed lists into one list for sorting
             work = [[x, 0] for x in changed[0]]
             work[len(work):] = [[x, 1] for x in changed[1]]
             work.sort()

             delta = []
             dstart = None
             dend = None
             dline = [""]
             start = 0
             # zero copy representation of addlist as a buffer
             addbuf = buffer(addlist)

             # start with a readonly loop that finds the offset of
             # each line and creates the deltas
             for w in work:
                 f = w[0]
                 # bs will either be the index of the item or the insert point
                 start, end = self._search(addbuf, f, start)
                 if w[1] == 0:
                     l = "%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
                 else:
                     l = ""
                 if start == end and w[1] == 1:
                     # item we want to delete was not found, error out
                     raise AssertionError(
                             _("failed to remove %s from manifest") % f)
                 if dstart != None and dstart <= start and dend >= start:
                     if dend < end:
                         dend = end
                     if l:
                         dline.append(l)
                 else:
                     if dstart != None:
                         delta.append([dstart, dend, "".join(dline)])
                     dstart = start
                     dend = end
                     dline = [l]

             if dstart != None:
                 delta.append([dstart, dend, "".join(dline)])
             # apply the delta to the addlist, and get a delta for addrevision
             cachedelta = addlistdelta(addlist, delta)

             # the delta is only valid if we've been processing the tip revision
             if self.mapcache[0] != self.tip():
                 cachedelta = None
             self.listcache = addlist

         n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
                              p2, cachedelta)
         self.mapcache = (n, map)

         return n
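The manifest block that the new parselines()/readdelta() helpers operate on is one line per tracked file: the path, a NUL byte, the 40-character hex nodeid, optional flag characters ("x" or "l"), and a newline, i.e. the same layout add() builds with "%s\000%s%s\n". A minimal, self-contained sketch of that parsing, using hypothetical sample data and standing in for the real manifestdict/revlog machinery:

# Toy sketch only: hypothetical paths and nodeids, no Mercurial API calls.
sample = ("foo/bar.c\x00" + "a" * 40 + "\n" +
          "tools/run.sh\x00" + "b" * 40 + "x\n")

def parselines(lines):
    # same shape as manifest.parselines() in the diff above
    for l in lines.splitlines(1):
        yield l.split('\0')

nodes = {}
flags = {}
for f, n in parselines(sample):
    nodes[f] = n[:40]     # manifestdict.rawset() would bin() this hex string
    flags[f] = n[40:-1]   # trailing flag characters, e.g. "x" or "l"

assert flags["tools/run.sh"] == "x"
assert flags["foo/bar.c"] == ""

readdelta() applies this same parsing, but to the text recovered from a revlog delta rather than to a full manifest revision, which is what lets verify() below inspect changed entries without reconstructing every manifest.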
verify.py
@@ -1,200 +1,194 @@
 # verify.py - repository integrity checking for Mercurial
 #
 # Copyright 2006 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.

 from node import *
 from i18n import gettext as _
 import revlog, mdiff

 def verify(repo):
     filelinkrevs = {}
     filenodes = {}
     changesets = revisions = files = 0
     errors = [0]
     warnings = [0]
     neededmanifests = {}

     def err(msg):
         repo.ui.warn(msg + "\n")
         errors[0] += 1

     def warn(msg):
         repo.ui.warn(msg + "\n")
         warnings[0] += 1

     def checksize(obj, name):
         d = obj.checksize()
         if d[0]:
             err(_("%s data length off by %d bytes") % (name, d[0]))
         if d[1]:
             err(_("%s index contains %d extra bytes") % (name, d[1]))

     def checkversion(obj, name):
         if obj.version != revlog.REVLOGV0:
             if not revlogv1:
                 warn(_("warning: `%s' uses revlog format 1") % name)
         elif revlogv1:
             warn(_("warning: `%s' uses revlog format 0") % name)

     revlogv1 = repo.revlogversion != revlog.REVLOGV0
     if repo.ui.verbose or revlogv1 != repo.revlogv1:
         repo.ui.status(_("repository uses revlog format %d\n") %
                        (revlogv1 and 1 or 0))

     seen = {}
     repo.ui.status(_("checking changesets\n"))
     checksize(repo.changelog, "changelog")

     for i in range(repo.changelog.count()):
         changesets += 1
         n = repo.changelog.node(i)
         l = repo.changelog.linkrev(n)
         if l != i:
             err(_("incorrect link (%d) for changeset revision %d") %(l, i))
         if n in seen:
             err(_("duplicate changeset at revision %d") % i)
         seen[n] = 1

         for p in repo.changelog.parents(n):
             if p not in repo.changelog.nodemap:
                 err(_("changeset %s has unknown parent %s") %
                     (short(n), short(p)))
         try:
             changes = repo.changelog.read(n)
         except KeyboardInterrupt:
             repo.ui.warn(_("interrupted"))
             raise
         except Exception, inst:
             err(_("unpacking changeset %s: %s") % (short(n), inst))
             continue

         neededmanifests[changes[0]] = n

         for f in changes[3]:
             filelinkrevs.setdefault(f, []).append(i)

     seen = {}
     repo.ui.status(_("checking manifests\n"))
     checkversion(repo.manifest, "manifest")
     checksize(repo.manifest, "manifest")

     for i in range(repo.manifest.count()):
         n = repo.manifest.node(i)
         l = repo.manifest.linkrev(n)

         if l < 0 or l >= repo.changelog.count():
             err(_("bad manifest link (%d) at revision %d") % (l, i))

         if n in neededmanifests:
             del neededmanifests[n]

         if n in seen:
             err(_("duplicate manifest at revision %d") % i)

         seen[n] = 1

         for p in repo.manifest.parents(n):
             if p not in repo.manifest.nodemap:
                 err(_("manifest %s has unknown parent %s") %
                     (short(n), short(p)))

         try:
-            delta = mdiff.patchtext(repo.manifest.delta(n))
+            for f, fn in repo.manifest.readdelta(n).iteritems():
+                filenodes.setdefault(f, {})[fn] = 1
         except KeyboardInterrupt:
             repo.ui.warn(_("interrupted"))
             raise
         except Exception, inst:
-            err(_("unpacking manifest %s: %s") % (short(n), inst))
+            err(_("reading delta for manifest %s: %s") % (short(n), inst))
             continue

-        try:
-            ff = [ l.split('\0') for l in delta.splitlines() ]
-            for f, fn in ff:
-                filenodes.setdefault(f, {})[bin(fn[:40])] = 1
-        except (ValueError, TypeError), inst:
-            err(_("broken delta in manifest %s: %s") % (short(n), inst))
-
     repo.ui.status(_("crosschecking files in changesets and manifests\n"))

     for m, c in neededmanifests.items():
         err(_("Changeset %s refers to unknown manifest %s") %
             (short(m), short(c)))
     del neededmanifests

     for f in filenodes:
         if f not in filelinkrevs:
             err(_("file %s in manifest but not in changesets") % f)

     for f in filelinkrevs:
         if f not in filenodes:
             err(_("file %s in changeset but not in manifest") % f)

     repo.ui.status(_("checking files\n"))
     ff = filenodes.keys()
     ff.sort()
     for f in ff:
         if f == "/dev/null":
             continue
         files += 1
         if not f:
             err(_("file without name in manifest %s") % short(n))
             continue
         fl = repo.file(f)
         checkversion(fl, f)
         checksize(fl, f)

         nodes = {nullid: 1}
         seen = {}
         for i in range(fl.count()):
             revisions += 1
             n = fl.node(i)

             if n in seen:
                 err(_("%s: duplicate revision %d") % (f, i))
             if n not in filenodes[f]:
                 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
             else:
                 del filenodes[f][n]

             flr = fl.linkrev(n)
             if flr not in filelinkrevs.get(f, []):
                 err(_("%s:%s points to unexpected changeset %d")
                     % (f, short(n), flr))
             else:
                 filelinkrevs[f].remove(flr)

             # verify contents
             try:
                 t = fl.read(n)
             except KeyboardInterrupt:
                 repo.ui.warn(_("interrupted"))
                 raise
             except Exception, inst:
                 err(_("unpacking file %s %s: %s") % (f, short(n), inst))

             # verify parents
             (p1, p2) = fl.parents(n)
             if p1 not in nodes:
                 err(_("file %s:%s unknown parent 1 %s") %
                     (f, short(n), short(p1)))
             if p2 not in nodes:
                 err(_("file %s:%s unknown parent 2 %s") %
                     (f, short(n), short(p1)))
             nodes[n] = 1

         # cross-check
         for node in filenodes[f]:
             err(_("node %s in manifests not in %s") % (hex(node), f))

     repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
                    (files, changesets, revisions))

     if warnings[0]:
         repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
     if errors[0]:
         repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
         return 1
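For completeness: the delta text that readdelta() hands to parselines() comes from mdiff.patchtext(), which walks the binary delta in the same "(start, end, length) + data" chunk layout that addlistdelta() packs above and keeps only each chunk's replacement data, so only changed manifest lines come back out (the old verify.py code split exactly that output on newlines). A toy re-implementation of that behaviour, with hypothetical chunk offsets and nodeids:

import struct

# Hypothetical delta with two chunks; the offsets are made up for illustration.
chunk1 = "foo/bar.c\x00" + "f" * 40 + "\n"
chunk2 = "tools/run.sh\x00" + "e" * 40 + "x\n"
delta = (struct.pack(">lll", 0, 51, len(chunk1)) + chunk1 +
         struct.pack(">lll", 102, 102, len(chunk2)) + chunk2)

def toy_patchtext(data):
    # concatenate the replacement data of each (start, end, length) chunk
    pos, out = 0, []
    while pos < len(data):
        start, end, l = struct.unpack(">lll", data[pos:pos + 12])
        pos += 12
        out.append(data[pos:pos + l])
        pos += l
    return "".join(out)

assert toy_patchtext(delta) == chunk1 + chunk2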