sidedatacopies: only read from copies when in this mode...
marmoute
r43504:e51f5d06 default
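
In short, this changeset threads an explicit flag through changelogrevision: the constructor gains a third argument, cpsd ("copies in changeset sidedata"), and the copy-related properties (p1copies, p2copies, filesadded, filesremoved) consult the sidedata dict only when that flag is set, falling back to the legacy 'extra' fields otherwise. changelog passes the flag as self._copiesstorage == b'changeset-sidedata', and in sidedata mode an absent or empty entry now means "no copies" ({} or []) rather than "not recorded" (None). The sketch below reproduces that dispatch pattern outside Mercurial; RevisionView, the bare SD_P1COPIES constant and decode() are illustrative stand-ins, not the real API. Worked examples of the changelog entry layout and of the delayed-write machinery follow the diff.

# Illustrative sketch (not Mercurial's API): RevisionView, the bare
# SD_P1COPIES constant and decode() stand in for changelogrevision,
# sidedatamod.SD_P1COPIES and copies.decodecopies().

SD_P1COPIES = 8  # stand-in sidedata key


def decode(raw):
    # stand-in decoder; the real code maps encoded copy data back to a
    # {destination: source} dict using the revision's file list
    return dict(raw)


class RevisionView(object):
    def __init__(self, extra, sidedata, cpsd):
        self._extra = extra        # legacy per-changeset 'extra' dict
        self._sidedata = sidedata  # sidedata dict keyed by integer constants
        self._cpsd = cpsd          # True iff copies-storage == 'changeset-sidedata'

    @property
    def p1copies(self):
        if self._cpsd:
            raw = self._sidedata.get(SD_P1COPIES)
            if not raw:
                return {}    # sidedata mode: absent/empty means "no copies"
        else:
            raw = self._extra.get(b'p1copies')
            if raw is None:
                return None  # extra mode: absent means "not recorded"
        return decode(raw)


# Only the flag decides which storage is consulted:
assert RevisionView({b'p1copies': {b'b': b'a'}}, {}, False).p1copies == {b'b': b'a'}
assert RevisionView({}, {SD_P1COPIES: {b'b': b'a'}}, True).p1copies == {b'b': b'a'}
assert RevisionView({}, {}, True).p1copies == {}
assert RevisionView({}, {}, False).p1copies is None
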
@@ -1,712 +1,727 @@
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .i18n import _
11 11 from .node import (
12 12 bin,
13 13 hex,
14 14 nullid,
15 15 )
16 16 from .thirdparty import attr
17 17
18 18 from . import (
19 19 copies,
20 20 encoding,
21 21 error,
22 22 pycompat,
23 23 revlog,
24 24 util,
25 25 )
26 26 from .utils import (
27 27 dateutil,
28 28 stringutil,
29 29 )
30 30
31 31 from .revlogutils import sidedata as sidedatamod
32 32
33 33 _defaultextra = {b'branch': b'default'}
34 34
35 35
36 36 def _string_escape(text):
37 37 """
38 38 >>> from .pycompat import bytechr as chr
39 39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
40 40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
41 41 >>> s
42 42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
43 43 >>> res = _string_escape(s)
44 44 >>> s == _string_unescape(res)
45 45 True
46 46 """
47 47 # subset of the string_escape codec
48 48 text = (
49 49 text.replace(b'\\', b'\\\\')
50 50 .replace(b'\n', b'\\n')
51 51 .replace(b'\r', b'\\r')
52 52 )
53 53 return text.replace(b'\0', b'\\0')
54 54
55 55
56 56 def _string_unescape(text):
57 57 if b'\\0' in text:
58 58 # fix up \0 without getting into trouble with \\0
59 59 text = text.replace(b'\\\\', b'\\\\\n')
60 60 text = text.replace(b'\\0', b'\0')
61 61 text = text.replace(b'\n', b'')
62 62 return stringutil.unescapestr(text)
63 63
64 64
65 65 def decodeextra(text):
66 66 """
67 67 >>> from .pycompat import bytechr as chr
68 68 >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
69 69 ... ).items())
70 70 [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
71 71 >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
72 72 ... b'baz': chr(92) + chr(0) + b'2'})
73 73 ... ).items())
74 74 [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
75 75 """
76 76 extra = _defaultextra.copy()
77 77 for l in text.split(b'\0'):
78 78 if l:
79 79 k, v = _string_unescape(l).split(b':', 1)
80 80 extra[k] = v
81 81 return extra
82 82
83 83
84 84 def encodeextra(d):
85 85 # keys must be sorted to produce a deterministic changelog entry
86 86 items = [
87 87 _string_escape(b'%s:%s' % (k, pycompat.bytestr(d[k])))
88 88 for k in sorted(d)
89 89 ]
90 90 return b"\0".join(items)
91 91
92 92
93 93 def stripdesc(desc):
94 94 """strip trailing whitespace and leading and trailing empty lines"""
95 95 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
96 96
97 97
98 98 class appender(object):
99 99 '''the changelog index must be updated last on disk, so we use this class
100 100 to delay writes to it'''
101 101
102 102 def __init__(self, vfs, name, mode, buf):
103 103 self.data = buf
104 104 fp = vfs(name, mode)
105 105 self.fp = fp
106 106 self.offset = fp.tell()
107 107 self.size = vfs.fstat(fp).st_size
108 108 self._end = self.size
109 109
110 110 def end(self):
111 111 return self._end
112 112
113 113 def tell(self):
114 114 return self.offset
115 115
116 116 def flush(self):
117 117 pass
118 118
119 119 @property
120 120 def closed(self):
121 121 return self.fp.closed
122 122
123 123 def close(self):
124 124 self.fp.close()
125 125
126 126 def seek(self, offset, whence=0):
127 127 '''virtual file offset spans real file and data'''
128 128 if whence == 0:
129 129 self.offset = offset
130 130 elif whence == 1:
131 131 self.offset += offset
132 132 elif whence == 2:
133 133 self.offset = self.end() + offset
134 134 if self.offset < self.size:
135 135 self.fp.seek(self.offset)
136 136
137 137 def read(self, count=-1):
138 138 '''only trick here is reads that span real file and data'''
139 139 ret = b""
140 140 if self.offset < self.size:
141 141 s = self.fp.read(count)
142 142 ret = s
143 143 self.offset += len(s)
144 144 if count > 0:
145 145 count -= len(s)
146 146 if count != 0:
147 147 doff = self.offset - self.size
148 148 self.data.insert(0, b"".join(self.data))
149 149 del self.data[1:]
150 150 s = self.data[0][doff : doff + count]
151 151 self.offset += len(s)
152 152 ret += s
153 153 return ret
154 154
155 155 def write(self, s):
156 156 self.data.append(bytes(s))
157 157 self.offset += len(s)
158 158 self._end += len(s)
159 159
160 160 def __enter__(self):
161 161 self.fp.__enter__()
162 162 return self
163 163
164 164 def __exit__(self, *args):
165 165 return self.fp.__exit__(*args)
166 166
167 167
168 168 def _divertopener(opener, target):
169 169 """build an opener that writes in 'target.a' instead of 'target'"""
170 170
171 171 def _divert(name, mode=b'r', checkambig=False):
172 172 if name != target:
173 173 return opener(name, mode)
174 174 return opener(name + b".a", mode)
175 175
176 176 return _divert
177 177
178 178
179 179 def _delayopener(opener, target, buf):
180 180 """build an opener that stores chunks in 'buf' instead of 'target'"""
181 181
182 182 def _delay(name, mode=b'r', checkambig=False):
183 183 if name != target:
184 184 return opener(name, mode)
185 185 return appender(opener, name, mode, buf)
186 186
187 187 return _delay
188 188
189 189
190 190 @attr.s
191 191 class _changelogrevision(object):
192 192 # Extensions might modify _defaultextra, so let the constructor below pass
193 193 # it in
194 194 extra = attr.ib()
195 195 manifest = attr.ib(default=nullid)
196 196 user = attr.ib(default=b'')
197 197 date = attr.ib(default=(0, 0))
198 198 files = attr.ib(default=attr.Factory(list))
199 199 filesadded = attr.ib(default=None)
200 200 filesremoved = attr.ib(default=None)
201 201 p1copies = attr.ib(default=None)
202 202 p2copies = attr.ib(default=None)
203 203 description = attr.ib(default=b'')
204 204
205 205
206 206 class changelogrevision(object):
207 207 """Holds results of a parsed changelog revision.
208 208
209 209 Changelog revisions consist of multiple pieces of data, including
210 210 the manifest node, user, and date. This object exposes a view into
211 211 the parsed object.
212 212 """
213 213
214 214 __slots__ = (
215 215 r'_offsets',
216 216 r'_text',
217 217 r'_sidedata',
218 r'_cpsd',
218 219 )
219 220
220 def __new__(cls, text, sidedata):
221 def __new__(cls, text, sidedata, cpsd):
221 222 if not text:
222 223 return _changelogrevision(extra=_defaultextra)
223 224
224 225 self = super(changelogrevision, cls).__new__(cls)
225 226 # We could return here and implement the following as an __init__.
226 227 # But doing it here is equivalent and saves an extra function call.
227 228
228 229 # format used:
229 230 # nodeid\n : manifest node in ascii
230 231 # user\n : user, no \n or \r allowed
231 232 # time tz extra\n : date (time is int or float, timezone is int)
232 233 # : extra is metadata, encoded and separated by '\0'
233 234 # : older versions ignore it
234 235 # files\n\n : files modified by the cset, no \n or \r allowed
235 236 # (.*) : comment (free text, ideally utf-8)
236 237 #
237 238 # changelog v0 doesn't use extra
238 239
239 240 nl1 = text.index(b'\n')
240 241 nl2 = text.index(b'\n', nl1 + 1)
241 242 nl3 = text.index(b'\n', nl2 + 1)
242 243
243 244 # The list of files may be empty, in which case nl3 is the first
244 245 # newline of the double newline that precedes the description.
245 246 if text[nl3 + 1 : nl3 + 2] == b'\n':
246 247 doublenl = nl3
247 248 else:
248 249 doublenl = text.index(b'\n\n', nl3 + 1)
249 250
250 251 self._offsets = (nl1, nl2, nl3, doublenl)
251 252 self._text = text
252 253 self._sidedata = sidedata
254 self._cpsd = cpsd
253 255
254 256 return self
255 257
256 258 @property
257 259 def manifest(self):
258 260 return bin(self._text[0 : self._offsets[0]])
259 261
260 262 @property
261 263 def user(self):
262 264 off = self._offsets
263 265 return encoding.tolocal(self._text[off[0] + 1 : off[1]])
264 266
265 267 @property
266 268 def _rawdate(self):
267 269 off = self._offsets
268 270 dateextra = self._text[off[1] + 1 : off[2]]
269 271 return dateextra.split(b' ', 2)[0:2]
270 272
271 273 @property
272 274 def _rawextra(self):
273 275 off = self._offsets
274 276 dateextra = self._text[off[1] + 1 : off[2]]
275 277 fields = dateextra.split(b' ', 2)
276 278 if len(fields) != 3:
277 279 return None
278 280
279 281 return fields[2]
280 282
281 283 @property
282 284 def date(self):
283 285 raw = self._rawdate
284 286 time = float(raw[0])
285 287 # Various tools did silly things with the timezone.
286 288 try:
287 289 timezone = int(raw[1])
288 290 except ValueError:
289 291 timezone = 0
290 292
291 293 return time, timezone
292 294
293 295 @property
294 296 def extra(self):
295 297 raw = self._rawextra
296 298 if raw is None:
297 299 return _defaultextra
298 300
299 301 return decodeextra(raw)
300 302
301 303 @property
302 304 def files(self):
303 305 off = self._offsets
304 306 if off[2] == off[3]:
305 307 return []
306 308
307 309 return self._text[off[2] + 1 : off[3]].split(b'\n')
308 310
309 311 @property
310 312 def filesadded(self):
311 if sidedatamod.SD_FILESADDED in self._sidedata:
313 if self._cpsd:
312 314 rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
315 if not rawindices:
316 return []
313 317 else:
314 318 rawindices = self.extra.get(b'filesadded')
315 319 if rawindices is None:
316 320 return None
317 321 return copies.decodefileindices(self.files, rawindices)
318 322
319 323 @property
320 324 def filesremoved(self):
321 if sidedatamod.SD_FILESREMOVED in self._sidedata:
325 if self._cpsd:
322 326 rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
327 if not rawindices:
328 return []
323 329 else:
324 330 rawindices = self.extra.get(b'filesremoved')
325 331 if rawindices is None:
326 332 return None
327 333 return copies.decodefileindices(self.files, rawindices)
328 334
329 335 @property
330 336 def p1copies(self):
331 if sidedatamod.SD_P1COPIES in self._sidedata:
337 if self._cpsd:
332 338 rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
339 if not rawcopies:
340 return {}
333 341 else:
334 342 rawcopies = self.extra.get(b'p1copies')
335 343 if rawcopies is None:
336 344 return None
337 345 return copies.decodecopies(self.files, rawcopies)
338 346
339 347 @property
340 348 def p2copies(self):
341 if sidedatamod.SD_P2COPIES in self._sidedata:
349 if self._cpsd:
342 350 rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
351 if not rawcopies:
352 return {}
343 353 else:
344 354 rawcopies = self.extra.get(b'p2copies')
345 355 if rawcopies is None:
346 356 return None
347 357 return copies.decodecopies(self.files, rawcopies)
348 358
349 359 @property
350 360 def description(self):
351 361 return encoding.tolocal(self._text[self._offsets[3] + 2 :])
352 362
353 363
354 364 class changelog(revlog.revlog):
355 365 def __init__(self, opener, trypending=False):
356 366 """Load a changelog revlog using an opener.
357 367
358 368 If ``trypending`` is true, we attempt to load the index from a
359 369 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
360 370 The ``00changelog.i.a`` file contains index (and possibly inline
361 371 revision) data for a transaction that hasn't been finalized yet.
362 372 It exists in a separate file to facilitate readers (such as
363 373 hook processes) accessing data before a transaction is finalized.
364 374 """
365 375 if trypending and opener.exists(b'00changelog.i.a'):
366 376 indexfile = b'00changelog.i.a'
367 377 else:
368 378 indexfile = b'00changelog.i'
369 379
370 380 datafile = b'00changelog.d'
371 381 revlog.revlog.__init__(
372 382 self,
373 383 opener,
374 384 indexfile,
375 385 datafile=datafile,
376 386 checkambig=True,
377 387 mmaplargeindex=True,
378 388 )
379 389
380 390 if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
381 391 # changelogs don't benefit from generaldelta.
382 392
383 393 self.version &= ~revlog.FLAG_GENERALDELTA
384 394 self._generaldelta = False
385 395
386 396 # Delta chains for changelogs tend to be very small because entries
387 397 # tend to be small and don't delta well with each other. So disable
388 398 # delta chains.
389 399 self._storedeltachains = False
390 400
391 401 self._realopener = opener
392 402 self._delayed = False
393 403 self._delaybuf = None
394 404 self._divert = False
395 405 self.filteredrevs = frozenset()
396 406 self._copiesstorage = opener.options.get(b'copies-storage')
397 407
398 408 def tiprev(self):
399 409 for i in pycompat.xrange(len(self) - 1, -2, -1):
400 410 if i not in self.filteredrevs:
401 411 return i
402 412
403 413 def tip(self):
404 414 """filtered version of revlog.tip"""
405 415 return self.node(self.tiprev())
406 416
407 417 def __contains__(self, rev):
408 418 """filtered version of revlog.__contains__"""
409 419 return 0 <= rev < len(self) and rev not in self.filteredrevs
410 420
411 421 def __iter__(self):
412 422 """filtered version of revlog.__iter__"""
413 423 if len(self.filteredrevs) == 0:
414 424 return revlog.revlog.__iter__(self)
415 425
416 426 def filterediter():
417 427 for i in pycompat.xrange(len(self)):
418 428 if i not in self.filteredrevs:
419 429 yield i
420 430
421 431 return filterediter()
422 432
423 433 def revs(self, start=0, stop=None):
424 434 """filtered version of revlog.revs"""
425 435 for i in super(changelog, self).revs(start, stop):
426 436 if i not in self.filteredrevs:
427 437 yield i
428 438
429 439 def _checknofilteredinrevs(self, revs):
430 440 """raise the appropriate error if 'revs' contains a filtered revision
431 441
432 442 This returns a version of 'revs' to be used thereafter by the caller.
433 443 In particular, if revs is an iterator, it is converted into a set.
434 444 """
435 445 safehasattr = util.safehasattr
436 446 if safehasattr(revs, '__next__'):
437 447 # Note that inspect.isgenerator() is not true for iterators, hence the __next__ check above.
438 448 revs = set(revs)
439 449
440 450 filteredrevs = self.filteredrevs
441 451 if safehasattr(revs, 'first'): # smartset
442 452 offenders = revs & filteredrevs
443 453 else:
444 454 offenders = filteredrevs.intersection(revs)
445 455
446 456 for rev in offenders:
447 457 raise error.FilteredIndexError(rev)
448 458 return revs
449 459
450 460 def headrevs(self, revs=None):
451 461 if revs is None and self.filteredrevs:
452 462 try:
453 463 return self.index.headrevsfiltered(self.filteredrevs)
454 464 # AttributeError covers non-c-extension environments and
455 465 # old c extensions without filter handling.
456 466 except AttributeError:
457 467 return self._headrevs()
458 468
459 469 if self.filteredrevs:
460 470 revs = self._checknofilteredinrevs(revs)
461 471 return super(changelog, self).headrevs(revs)
462 472
463 473 def strip(self, *args, **kwargs):
464 474 # XXX make something better than assert
465 475 # We can't expect proper strip behavior if we are filtered.
466 476 assert not self.filteredrevs
467 477 super(changelog, self).strip(*args, **kwargs)
468 478
469 479 def rev(self, node):
470 480 """filtered version of revlog.rev"""
471 481 r = super(changelog, self).rev(node)
472 482 if r in self.filteredrevs:
473 483 raise error.FilteredLookupError(
474 484 hex(node), self.indexfile, _(b'filtered node')
475 485 )
476 486 return r
477 487
478 488 def node(self, rev):
479 489 """filtered version of revlog.node"""
480 490 if rev in self.filteredrevs:
481 491 raise error.FilteredIndexError(rev)
482 492 return super(changelog, self).node(rev)
483 493
484 494 def linkrev(self, rev):
485 495 """filtered version of revlog.linkrev"""
486 496 if rev in self.filteredrevs:
487 497 raise error.FilteredIndexError(rev)
488 498 return super(changelog, self).linkrev(rev)
489 499
490 500 def parentrevs(self, rev):
491 501 """filtered version of revlog.parentrevs"""
492 502 if rev in self.filteredrevs:
493 503 raise error.FilteredIndexError(rev)
494 504 return super(changelog, self).parentrevs(rev)
495 505
496 506 def flags(self, rev):
497 507 """filtered version of revlog.flags"""
498 508 if rev in self.filteredrevs:
499 509 raise error.FilteredIndexError(rev)
500 510 return super(changelog, self).flags(rev)
501 511
502 512 def delayupdate(self, tr):
503 513 b"delay visibility of index updates to other readers"
504 514
505 515 if not self._delayed:
506 516 if len(self) == 0:
507 517 self._divert = True
508 518 if self._realopener.exists(self.indexfile + b'.a'):
509 519 self._realopener.unlink(self.indexfile + b'.a')
510 520 self.opener = _divertopener(self._realopener, self.indexfile)
511 521 else:
512 522 self._delaybuf = []
513 523 self.opener = _delayopener(
514 524 self._realopener, self.indexfile, self._delaybuf
515 525 )
516 526 self._delayed = True
517 527 tr.addpending(b'cl-%i' % id(self), self._writepending)
518 528 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
519 529
520 530 def _finalize(self, tr):
521 531 b"finalize index updates"
522 532 self._delayed = False
523 533 self.opener = self._realopener
524 534 # move redirected index data back into place
525 535 if self._divert:
526 536 assert not self._delaybuf
527 537 tmpname = self.indexfile + b".a"
528 538 nfile = self.opener.open(tmpname)
529 539 nfile.close()
530 540 self.opener.rename(tmpname, self.indexfile, checkambig=True)
531 541 elif self._delaybuf:
532 542 fp = self.opener(self.indexfile, b'a', checkambig=True)
533 543 fp.write(b"".join(self._delaybuf))
534 544 fp.close()
535 545 self._delaybuf = None
536 546 self._divert = False
537 547 # split when we're done
538 548 self._enforceinlinesize(tr)
539 549
540 550 def _writepending(self, tr):
541 551 b"create a file containing the unfinalized state for pretxnchangegroup"
542 552 if self._delaybuf:
543 553 # make a temporary copy of the index
544 554 fp1 = self._realopener(self.indexfile)
545 555 pendingfilename = self.indexfile + b".a"
546 556 # register as a temp file to ensure cleanup on failure
547 557 tr.registertmp(pendingfilename)
548 558 # write existing data
549 559 fp2 = self._realopener(pendingfilename, b"w")
550 560 fp2.write(fp1.read())
551 561 # add pending data
552 562 fp2.write(b"".join(self._delaybuf))
553 563 fp2.close()
554 564 # switch modes so finalize can simply rename
555 565 self._delaybuf = None
556 566 self._divert = True
557 567 self.opener = _divertopener(self._realopener, self.indexfile)
558 568
559 569 if self._divert:
560 570 return True
561 571
562 572 return False
563 573
564 574 def _enforceinlinesize(self, tr, fp=None):
565 575 if not self._delayed:
566 576 revlog.revlog._enforceinlinesize(self, tr, fp)
567 577
568 578 def read(self, node):
569 579 """Obtain data from a parsed changelog revision.
570 580
571 581 Returns a 6-tuple of:
572 582
573 583 - manifest node in binary
574 584 - author/user as a localstr
575 585 - date as a 2-tuple of (time, timezone)
576 586 - list of files
577 587 - commit message as a localstr
578 588 - dict of extra metadata
579 589
580 590 Unless you need to access all fields, consider calling
581 591 ``changelogrevision`` instead, as it is faster for partial object
582 592 access.
583 593 """
584 c = changelogrevision(*self._revisiondata(node))
594 d, s = self._revisiondata(node)
595 c = changelogrevision(
596 d, s, self._copiesstorage == b'changeset-sidedata'
597 )
585 598 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
586 599
587 600 def changelogrevision(self, nodeorrev):
588 601 """Obtain a ``changelogrevision`` for a node or revision."""
589 602 text, sidedata = self._revisiondata(nodeorrev)
590 return changelogrevision(text, sidedata)
603 return changelogrevision(
604 text, sidedata, self._copiesstorage == b'changeset-sidedata'
605 )
591 606
592 607 def readfiles(self, node):
593 608 """
594 609 short version of read that only returns the files modified by the cset
595 610 """
596 611 text = self.revision(node)
597 612 if not text:
598 613 return []
599 614 last = text.index(b"\n\n")
600 615 l = text[:last].split(b'\n')
601 616 return l[3:]
602 617
603 618 def add(
604 619 self,
605 620 manifest,
606 621 files,
607 622 desc,
608 623 transaction,
609 624 p1,
610 625 p2,
611 626 user,
612 627 date=None,
613 628 extra=None,
614 629 p1copies=None,
615 630 p2copies=None,
616 631 filesadded=None,
617 632 filesremoved=None,
618 633 ):
619 634 # Convert to UTF-8 encoded bytestrings as the very first
620 635 # thing: calling any method on a localstr object will turn it
621 636 # into a str object and the cached UTF-8 string is thus lost.
622 637 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
623 638
624 639 user = user.strip()
625 640 # An empty username or a username with a "\n" will make the
626 641 # revision text contain two "\n\n" sequences -> corrupt
627 642 # repository since read cannot unpack the revision.
628 643 if not user:
629 644 raise error.StorageError(_(b"empty username"))
630 645 if b"\n" in user:
631 646 raise error.StorageError(
632 647 _(b"username %r contains a newline") % pycompat.bytestr(user)
633 648 )
634 649
635 650 desc = stripdesc(desc)
636 651
637 652 if date:
638 653 parseddate = b"%d %d" % dateutil.parsedate(date)
639 654 else:
640 655 parseddate = b"%d %d" % dateutil.makedate()
641 656 if extra:
642 657 branch = extra.get(b"branch")
643 658 if branch in (b"default", b""):
644 659 del extra[b"branch"]
645 660 elif branch in (b".", b"null", b"tip"):
646 661 raise error.StorageError(
647 662 _(b'the name \'%s\' is reserved') % branch
648 663 )
649 664 sortedfiles = sorted(files)
650 665 sidedata = None
651 666 if extra is not None:
652 667 for name in (
653 668 b'p1copies',
654 669 b'p2copies',
655 670 b'filesadded',
656 671 b'filesremoved',
657 672 ):
658 673 extra.pop(name, None)
659 674 if p1copies is not None:
660 675 p1copies = copies.encodecopies(sortedfiles, p1copies)
661 676 if p2copies is not None:
662 677 p2copies = copies.encodecopies(sortedfiles, p2copies)
663 678 if filesadded is not None:
664 679 filesadded = copies.encodefileindices(sortedfiles, filesadded)
665 680 if filesremoved is not None:
666 681 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
667 682 if self._copiesstorage == b'extra':
668 683 extrasentries = p1copies, p2copies, filesadded, filesremoved
669 684 if extra is None and any(x is not None for x in extrasentries):
670 685 extra = {}
671 686 if p1copies is not None:
672 687 extra[b'p1copies'] = p1copies
673 688 if p2copies is not None:
674 689 extra[b'p2copies'] = p2copies
675 690 if filesadded is not None:
676 691 extra[b'filesadded'] = filesadded
677 692 if filesremoved is not None:
678 693 extra[b'filesremoved'] = filesremoved
679 694 elif self._copiesstorage == b'changeset-sidedata':
680 695 sidedata = {}
681 696 if p1copies is not None:
682 697 sidedata[sidedatamod.SD_P1COPIES] = p1copies
683 698 if p2copies is not None:
684 699 sidedata[sidedatamod.SD_P2COPIES] = p2copies
685 700 if filesadded is not None:
686 701 sidedata[sidedatamod.SD_FILESADDED] = filesadded
687 702 if filesremoved is not None:
688 703 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
689 704
690 705 if extra:
691 706 extra = encodeextra(extra)
692 707 parseddate = b"%s %s" % (parseddate, extra)
693 708 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
694 709 text = b"\n".join(l)
695 710 return self.addrevision(
696 711 text, transaction, len(self), p1, p2, sidedata=sidedata
697 712 )
698 713
699 714 def branchinfo(self, rev):
700 715 """return the branch name and open/close state of a revision
701 716
702 717 This function exists because creating a changectx object
703 718 just to access this is costly."""
704 719 extra = self.read(rev)[5]
705 720 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
706 721
707 722 def _nodeduplicatecallback(self, transaction, node):
708 723 # keep track of revisions that got "re-added", e.g. unbundle of a known rev.
709 724 #
710 725 # We track them in a list to preserve their order from the source bundle
711 726 duplicates = transaction.changes.setdefault(b'revduplicates', [])
712 727 duplicates.append(self.rev(node))
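
A side note on the entry format parsed by changelogrevision.__new__ above (manifest node, user, "time tz extra", file list, blank line, description): it is easiest to see with a small self-contained example that slices a hand-built entry using the same newline offsets the parser computes. Everything below is a toy, with no Mercurial imports, a dummy all-zero manifest node and a simplified extra field.

# Build one changelog entry text blob by hand.
entry = b"\n".join(
    [
        b"0" * 40,                     # manifest node in hex (dummy)
        b"Alice <alice@example.com>",  # user
        b"0 0 branch:stable",          # time tz extra (extra is optional)
        b"a.txt",                      # files modified, one per line
        b"b.txt",
        b"",                           # empty element produces the '\n\n'
        b"example commit message",     # description (free text)
    ]
)

# Same offsets the parser computes: three single newlines, then the '\n\n'.
nl1 = entry.index(b"\n")
nl2 = entry.index(b"\n", nl1 + 1)
nl3 = entry.index(b"\n", nl2 + 1)
if entry[nl3 + 1 : nl3 + 2] == b"\n":   # empty file list
    doublenl = nl3
else:
    doublenl = entry.index(b"\n\n", nl3 + 1)

assert entry[0:nl1] == b"0" * 40
assert entry[nl1 + 1 : nl2] == b"Alice <alice@example.com>"
assert entry[nl2 + 1 : nl3].split(b" ", 2)[2] == b"branch:stable"
assert entry[nl3 + 1 : doublenl].split(b"\n") == [b"a.txt", b"b.txt"]
assert entry[doublenl + 2 :] == b"example commit message"
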
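
Another side note: the delayupdate()/_writepending()/_finalize() machinery (together with appender, _divertopener and _delayopener) exists so that index writes stay invisible to other readers until the transaction finalizes. Below is a toy sketch of that buffer-then-publish idea; MemoryIndex and its methods are invented for illustration and are not Mercurial's API.

class MemoryIndex(object):
    def __init__(self):
        self._real = bytearray()  # what other readers see
        self._pending = []        # buffered writes, invisible until finalize

    def write(self, data):
        # analogous to appender.write(): stash the chunk instead of hitting disk
        self._pending.append(bytes(data))

    def visible(self):
        return bytes(self._real)

    def finalize(self):
        # analogous to changelog._finalize(): publish the buffer, in order
        self._real.extend(b"".join(self._pending))
        del self._pending[:]


idx = MemoryIndex()
idx.write(b"rev1")
idx.write(b"rev2")
assert idx.visible() == b""          # readers see nothing mid-transaction
idx.finalize()
assert idx.visible() == b"rev1rev2"  # everything appears at once
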