revlog: make sure the files are closed after an exception happens...
Benoit Boissinot
r6261:7c8101b5 (branch: default)
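
The whole patch applies one pattern: wherever a data-file or index-file handle is opened and then written in a loop, the writes are now wrapped in try/finally so the handle is released even when an exception escapes mid-write. The same shape appears in each hunk below: the inline-data rewrite loop, the wrapper around _addrevision(), and the delta-group loop. A minimal sketch of the pattern, with made-up names (copy_ranges, ranges) standing in for the real revlog code and the built-in open() standing in for self.opener():

    # Sketch only: a simplified stand-in for the rewrite loop, not revlog's API.
    def copy_ranges(fp, dst_path, ranges):
        df = open(dst_path, 'wb')
        try:
            for start, length in ranges:    # any of these calls may raise
                fp.seek(start)
                df.write(fp.read(length))
        finally:
            df.close()                      # handle is released even on error

Without the finally clause, an I/O error inside the loop would leave the half-written destination file open until the handle happens to be garbage-collected.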
@@ -977,15 +977,18 @@ class revlog(object):
 
         tr.add(self.datafile, dataoff)
         df = self.opener(self.datafile, 'w')
-        calc = self._io.size
-        for r in xrange(self.count()):
-            start = self.start(r) + (r + 1) * calc
-            length = self.length(r)
-            fp.seek(start)
-            d = fp.read(length)
-            df.write(d)
+        try:
+            calc = self._io.size
+            for r in xrange(self.count()):
+                start = self.start(r) + (r + 1) * calc
+                length = self.length(r)
+                fp.seek(start)
+                d = fp.read(length)
+                df.write(d)
+        finally:
+            df.close()
+
         fp.close()
-        df.close()
         fp = self.opener(self.indexfile, 'w', atomictemp=True)
         self.version &= ~(REVLOGNGINLINEDATA)
         self._inline = False
@@ -993,6 +996,7 @@ class revlog(object):
             e = self._io.packentry(self.index[i], self.node, self.version, i)
             fp.write(e)
 
+        fp.close()
         # if we don't call rename, the temp file will never replace the
         # real index
         fp.rename()
@@ -1013,7 +1017,12 @@ class revlog(object):
         if not self._inline:
             dfh = self.opener(self.datafile, "a")
         ifh = self.opener(self.indexfile, "a+")
-        return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
+        try:
+            return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
+        finally:
+            if dfh:
+                dfh.close()
+            ifh.close()
 
     def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
         node = hash(text, p1, p2)
@@ -1154,86 +1163,91 @@ class revlog(object):
         transaction.add(self.datafile, end)
         dfh = self.opener(self.datafile, "a")
 
-        # loop through our set of deltas
-        chain = None
-        for chunk in revs:
-            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
-            link = linkmapper(cs)
-            if node in self.nodemap:
-                # this can happen if two branches make the same change
-                # if unique:
-                # raise RevlogError(_("already have %s") % hex(node[:4]))
-                chain = node
-                continue
-            delta = buffer(chunk, 80)
-            del chunk
+        try:
+            # loop through our set of deltas
+            chain = None
+            for chunk in revs:
+                node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
+                link = linkmapper(cs)
+                if node in self.nodemap:
+                    # this can happen if two branches make the same change
+                    # if unique:
+                    # raise RevlogError(_("already have %s") % hex(node[:4]))
+                    chain = node
+                    continue
+                delta = buffer(chunk, 80)
+                del chunk
 
-            for p in (p1, p2):
-                if not p in self.nodemap:
-                    raise LookupError(p, self.indexfile, _('unknown parent'))
+                for p in (p1, p2):
+                    if not p in self.nodemap:
+                        raise LookupError(p, self.indexfile, _('unknown parent'))
 
-            if not chain:
-                # retrieve the parent revision of the delta chain
-                chain = p1
-                if not chain in self.nodemap:
-                    raise LookupError(chain, self.indexfile, _('unknown base'))
+                if not chain:
+                    # retrieve the parent revision of the delta chain
+                    chain = p1
+                    if not chain in self.nodemap:
+                        raise LookupError(chain, self.indexfile, _('unknown base'))
 
-            # full versions are inserted when the needed deltas become
-            # comparable to the uncompressed text or when the previous
-            # version is not the one we have a delta against. We use
-            # the size of the previous full rev as a proxy for the
-            # current size.
+                # full versions are inserted when the needed deltas become
+                # comparable to the uncompressed text or when the previous
+                # version is not the one we have a delta against. We use
+                # the size of the previous full rev as a proxy for the
+                # current size.
 
-            if chain == prev:
-                cdelta = compress(delta)
-                cdeltalen = len(cdelta[0]) + len(cdelta[1])
-                textlen = mdiff.patchedsize(textlen, delta)
+                if chain == prev:
+                    cdelta = compress(delta)
+                    cdeltalen = len(cdelta[0]) + len(cdelta[1])
+                    textlen = mdiff.patchedsize(textlen, delta)
 
-            if chain != prev or (end - start + cdeltalen) > textlen * 2:
-                # flush our writes here so we can read it in revision
-                if dfh:
-                    dfh.flush()
-                ifh.flush()
-                text = self.revision(chain)
-                if len(text) == 0:
-                    # skip over trivial delta header
-                    text = buffer(delta, 12)
-                else:
-                    text = mdiff.patches(text, [delta])
-                del delta
-                chk = self._addrevision(text, transaction, link, p1, p2, None,
-                                        ifh, dfh)
-                if not dfh and not self._inline:
-                    # addrevision switched from inline to conventional
-                    # reopen the index
-                    dfh = self.opener(self.datafile, "a")
-                    ifh = self.opener(self.indexfile, "a")
-                if chk != node:
-                    raise RevlogError(_("consistency error adding group"))
-                textlen = len(text)
-            else:
-                e = (offset_type(end, 0), cdeltalen, textlen, base,
-                     link, self.rev(p1), self.rev(p2), node)
-                self.index.insert(-1, e)
-                self.nodemap[node] = r
-                entry = self._io.packentry(e, self.node, self.version, r)
-                if self._inline:
-                    ifh.write(entry)
-                    ifh.write(cdelta[0])
-                    ifh.write(cdelta[1])
-                    self.checkinlinesize(transaction, ifh)
-                    if not self._inline:
-                        dfh = self.opener(self.datafile, "a")
-                        ifh = self.opener(self.indexfile, "a")
-                else:
-                    dfh.write(cdelta[0])
-                    dfh.write(cdelta[1])
-                    ifh.write(entry)
+                if chain != prev or (end - start + cdeltalen) > textlen * 2:
+                    # flush our writes here so we can read it in revision
+                    if dfh:
+                        dfh.flush()
+                    ifh.flush()
+                    text = self.revision(chain)
+                    if len(text) == 0:
+                        # skip over trivial delta header
+                        text = buffer(delta, 12)
+                    else:
+                        text = mdiff.patches(text, [delta])
+                    del delta
+                    chk = self._addrevision(text, transaction, link, p1, p2, None,
+                                            ifh, dfh)
+                    if not dfh and not self._inline:
+                        # addrevision switched from inline to conventional
+                        # reopen the index
+                        dfh = self.opener(self.datafile, "a")
+                        ifh = self.opener(self.indexfile, "a")
+                    if chk != node:
+                        raise RevlogError(_("consistency error adding group"))
+                    textlen = len(text)
+                else:
+                    e = (offset_type(end, 0), cdeltalen, textlen, base,
+                         link, self.rev(p1), self.rev(p2), node)
+                    self.index.insert(-1, e)
+                    self.nodemap[node] = r
+                    entry = self._io.packentry(e, self.node, self.version, r)
+                    if self._inline:
+                        ifh.write(entry)
+                        ifh.write(cdelta[0])
+                        ifh.write(cdelta[1])
+                        self.checkinlinesize(transaction, ifh)
+                        if not self._inline:
+                            dfh = self.opener(self.datafile, "a")
+                            ifh = self.opener(self.indexfile, "a")
+                    else:
+                        dfh.write(cdelta[0])
+                        dfh.write(cdelta[1])
+                        ifh.write(entry)
 
-            t, r, chain, prev = r, r + 1, node, node
-            base = self.base(t)
-            start = self.start(base)
-            end = self.end(t)
+                t, r, chain, prev = r, r + 1, node, node
+                base = self.base(t)
+                start = self.start(base)
+                end = self.end(t)
+        finally:
+            if dfh:
+                dfh.close()
+            ifh.close()
 
         return node
 
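
The last two hunks use a slightly different cleanup because the data-file handle is opened only for non-inline revlogs and may be None; it is therefore closed conditionally, while the index handle is always closed. A rough sketch of that conditional cleanup, with placeholder names rather than the revlog internals:

    # Sketch only: 'inline', the paths and append_entry are placeholders.
    def append_entry(index_path, data_path, inline, entry):
        dfh = None if inline else open(data_path, 'ab')  # data handle only when not inline
        ifh = open(index_path, 'ab')
        try:
            if dfh:
                dfh.write(entry)    # 'entry' is assumed to be bytes
            ifh.write(entry)
        finally:
            if dfh:
                dfh.close()         # close only if it was actually opened
            ifh.close()             # the index handle is always closed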