Mercurial changeset r51406:5a62d56e (default branch), by marmoute:
"store: yield phases before changelog" — the phaseroots entry is now
yielded before the changelog when walking the store content.
@@ -1,1088 +1,1089 b''
1 1 # store.py - repository store handling for Mercurial
2 2 #
3 3 # Copyright 2008 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import collections
9 9 import functools
10 10 import os
11 11 import re
12 12 import stat
13 13 from typing import Generator
14 14
15 15 from .i18n import _
16 16 from .pycompat import getattr
17 17 from .thirdparty import attr
18 18 from .node import hex
19 19 from . import (
20 20 changelog,
21 21 error,
22 22 manifest,
23 23 policy,
24 24 pycompat,
25 25 util,
26 26 vfs as vfsmod,
27 27 )
28 28 from .utils import hashutil
29 29
30 30 parsers = policy.importmod('parsers')
31 31 # how much bytes should be read from fncache in one read
32 32 # It is done to prevent loading large fncache files into memory
33 33 fncache_chunksize = 10 ** 6
34 34
35 35
36 36 def _match_tracked_entry(entry, matcher):
37 37 """parses a fncache entry and returns whether the entry is tracking a path
38 38 matched by matcher or not.
39 39
40 40 If matcher is None, returns True"""
41 41
42 42 if matcher is None:
43 43 return True
44 44 if entry.is_filelog:
45 45 return matcher(entry.target_id)
46 46 elif entry.is_manifestlog:
47 47 return matcher.visitdir(entry.target_id.rstrip(b'/'))
48 48 raise error.ProgrammingError(b"cannot process entry %r" % entry)
49 49
50 50
51 51 # This avoids a collision between a file named foo and a dir named
52 52 # foo.i or foo.d
53 53 def _encodedir(path):
54 54 """
55 55 >>> _encodedir(b'data/foo.i')
56 56 'data/foo.i'
57 57 >>> _encodedir(b'data/foo.i/bla.i')
58 58 'data/foo.i.hg/bla.i'
59 59 >>> _encodedir(b'data/foo.i.hg/bla.i')
60 60 'data/foo.i.hg.hg/bla.i'
61 61 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
62 62 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
63 63 """
64 64 return (
65 65 path.replace(b".hg/", b".hg.hg/")
66 66 .replace(b".i/", b".i.hg/")
67 67 .replace(b".d/", b".d.hg/")
68 68 )
69 69
70 70
71 71 encodedir = getattr(parsers, 'encodedir', _encodedir)
72 72
73 73
def decodedir(path):
    """Undo the directory-name protection applied by ``encodedir``.

    >>> decodedir(b'data/foo.i')
    'data/foo.i'
    >>> decodedir(b'data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir(b'data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    """
    if b".hg/" not in path:
        # fast path: nothing was encoded in this name
        return path
    # reverse replacements, in the inverse order of encodedir
    decoded = path.replace(b".d.hg/", b".d/")
    decoded = decoded.replace(b".i.hg/", b".i/")
    decoded = decoded.replace(b".hg.hg/", b".hg/")
    return decoded
90 90
91 91
92 92 def _reserved():
93 93 """characters that are problematic for filesystems
94 94
95 95 * ascii escapes (0..31)
96 96 * ascii hi (126..255)
97 97 * windows specials
98 98
99 99 these characters will be escaped by encodefunctions
100 100 """
101 101 winreserved = [ord(x) for x in u'\\:*?"<>|']
102 102 for x in range(32):
103 103 yield x
104 104 for x in range(126, 256):
105 105 yield x
106 106 for x in winreserved:
107 107 yield x
108 108
109 109
def _buildencodefun():
    """Build the (encode, decode) function pair for store filenames.

    >>> enc, dec = _buildencodefun()

    >>> enc(b'nothing/special.txt')
    'nothing/special.txt'
    >>> dec(b'nothing/special.txt')
    'nothing/special.txt'

    >>> enc(b'HELLO')
    '_h_e_l_l_o'
    >>> dec(b'_h_e_l_l_o')
    'HELLO'

    >>> enc(b'hello:world?')
    'hello~3aworld~3f'
    >>> dec(b'hello~3aworld~3f')
    'hello:world?'

    >>> enc(b'the\\x07quick\\xADshot')
    'the~07quick~adshot'
    >>> dec(b'the~07quick~adshot')
    'the\\x07quick\\xadshot'
    """
    e = b'_'
    xchr = pycompat.bytechr
    asciistr = list(map(xchr, range(127)))
    capitals = list(range(ord(b"A"), ord(b"Z") + 1))

    # start from an identity map for 7-bit ASCII bytes...
    cmap = {x: x for x in asciistr}
    # ...escape problematic bytes as '~xx'...
    for x in _reserved():
        cmap[xchr(x)] = b"~%02x" % x
    # ...and fold capitals (and the escape char itself) to '_x'
    for x in capitals + [ord(e)]:
        cmap[xchr(x)] = e + xchr(x).lower()

    # decoding map is the inverse of cmap
    dmap = {}
    for k, v in cmap.items():
        dmap[v] = k

    def decode(s):
        # greedily match encoded atoms, which are 1 to 3 bytes long
        i = 0
        while i < len(s):
            for l in range(1, 4):
                try:
                    yield dmap[s[i : i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                # no atom matched: `s` is not a valid encoding
                raise KeyError

    return (
        lambda s: b''.join([cmap[s[c : c + 1]] for c in range(len(s))]),
        lambda s: b''.join(list(decode(s))),
    )
166 166
167 167
168 168 _encodefname, _decodefname = _buildencodefun()
169 169
170 170
def encodefilename(s):
    """Apply the directory protection, then the reversible byte encoding.

    >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
    """
    protected = encodedir(s)
    return _encodefname(protected)
177 177
178 178
def decodefilename(s):
    """Undo ``encodefilename``: decode the bytes, then the dir markers.

    >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
    """
    plain = _decodefname(s)
    return decodedir(plain)
185 185
186 186
def _buildlowerencodefun():
    """Build the case-folding byte encoder used by the hybrid encodings.

    Unlike `_buildencodefun`, capitals are simply lowercased, so this
    encoding is not reversible.

    >>> f = _buildlowerencodefun()
    >>> f(b'nothing/special.txt')
    'nothing/special.txt'
    >>> f(b'HELLO')
    'hello'
    >>> f(b'hello:world?')
    'hello~3aworld~3f'
    >>> f(b'the\\x07quick\\xADshot')
    'the~07quick~adshot'
    """
    xchr = pycompat.bytechr
    # identity for 7-bit ASCII, '~xx' escapes for reserved bytes,
    # lowercase for capitals
    cmap = {xchr(x): xchr(x) for x in range(127)}
    for x in _reserved():
        cmap[xchr(x)] = b"~%02x" % x
    for x in range(ord(b"A"), ord(b"Z") + 1):
        cmap[xchr(x)] = xchr(x).lower()

    def lowerencode(s):
        return b"".join([cmap[c] for c in pycompat.iterbytestr(s)])

    return lowerencode
210 210
211 211
212 212 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
213 213
214 214 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
215 215 _winres3 = (b'aux', b'con', b'prn', b'nul') # length 3
216 216 _winres4 = (b'com', b'lpt') # length 4 (with trailing 1..9)
217 217
218 218
def _auxencode(path, dotencode):
    """
    Encodes filenames containing names reserved by Windows or which end in
    period or space. Does not touch other single reserved characters c.
    Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
    Additionally encodes space or period at the beginning, if dotencode is
    True. Parameter path is assumed to be all lowercase.
    A segment only needs encoding if a reserved name appears as a
    basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
    doesn't need encoding.

    >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
    >>> _auxencode(s.split(b'/'), True)
    ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
    >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
    >>> _auxencode(s.split(b'/'), False)
    ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
    >>> _auxencode([b'foo. '], True)
    ['foo.~20']
    >>> _auxencode([b' .foo'], True)
    ['~20.foo']
    """
    # note: `path` is a list of segments that is mutated and returned
    for i, n in enumerate(path):
        if not n:
            continue
        if dotencode and n[0] in b'. ':
            # encode a leading period or space ('.foo' -> '~2efoo')
            n = b"~%02x" % ord(n[0:1]) + n[1:]
            path[i] = n
        else:
            # reserved names only matter as the part before the first dot
            l = n.find(b'.')
            if l == -1:
                l = len(n)
            if (l == 3 and n[:3] in _winres3) or (
                l == 4
                and n[3:4] <= b'9'
                and n[3:4] >= b'1'
                and n[:3] in _winres4
            ):
                # encode third letter ('aux' -> 'au~78')
                ec = b"~%02x" % ord(n[2:3])
                n = n[0:2] + ec + n[3:]
                path[i] = n
        if n[-1] in b'. ':
            # encode last period or space ('foo...' -> 'foo..~2e')
            path[i] = n[:-1] + b"~%02x" % ord(n[-1:])
    return path
265 265
266 266
267 267 _maxstorepathlen = 120
268 268 _dirprefixlen = 8
269 269 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
270 270
271 271
def _hashencode(path, dotencode):
    """hash `path` into a store path of bounded length

    Used when the plainly encoded path would be too long. The result
    starts with 'dh/', keeps up to `_dirprefixlen` characters of each
    directory level (bounded in total by `_maxshortdirslen`), and ends
    with as much of the basename as fits followed by the sha1 of the
    full path and the original extension. This encoding is NOT
    reversible.
    """
    digest = hex(hashutil.sha1(path).digest())
    le = lowerencode(path[5:]).split(b'/')  # skips prefix 'data/' or 'meta/'
    parts = _auxencode(le, dotencode)
    basename = parts[-1]
    _root, ext = os.path.splitext(basename)
    sdirs = []
    sdirslen = 0
    for p in parts[:-1]:
        d = p[:_dirprefixlen]
        if d[-1] in b'. ':
            # Windows can't access dirs ending in period or space
            d = d[:-1] + b'_'
        if sdirslen == 0:
            t = len(d)
        else:
            t = sdirslen + 1 + len(d)
            # stop adding directory levels once the budget is exhausted
            if t > _maxshortdirslen:
                break
        sdirs.append(d)
        sdirslen = t
    dirs = b'/'.join(sdirs)
    if len(dirs) > 0:
        dirs += b'/'
    res = b'dh/' + dirs + digest + ext
    spaceleft = _maxstorepathlen - len(res)
    if spaceleft > 0:
        # pad with the start of the basename, up to _maxstorepathlen
        filler = basename[:spaceleft]
        res = b'dh/' + dirs + filler + digest + ext
    return res
302 302
303 303
def _hybridencode(path, dotencode):
    """encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see _auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    """
    path = encodedir(path)
    ef = _encodefname(path).split(b'/')
    res = b'/'.join(_auxencode(ef, dotencode))
    if len(res) > _maxstorepathlen:
        # too long once encoded: fall back to the hashed form
        res = _hashencode(path, dotencode)
    return res
341 341
342 342
def _pathencode(path):
    """'dotencode' store path encoding (pure Python implementation)

    Falls back to the non-reversible `_hashencode` when either the raw
    path or its encoded form exceeds `_maxstorepathlen`. May be replaced
    by a C implementation (see the `getattr(parsers, ...)` below).
    """
    de = encodedir(path)
    if len(path) > _maxstorepathlen:
        # the encoded form can only be longer; skip straight to hashing
        return _hashencode(de, True)
    ef = _encodefname(de).split(b'/')
    res = b'/'.join(_auxencode(ef, True))
    if len(res) > _maxstorepathlen:
        return _hashencode(de, True)
    return res
352 352
353 353
354 354 _pathencode = getattr(parsers, 'pathencode', _pathencode)
355 355
356 356
def _plainhybridencode(f):
    """hybrid encoding without the leading dot/space ('dotencode') pass"""
    return _hybridencode(f, False)
359 359
360 360
def _calcmode(vfs):
    """Return the permission mode new store files should be created with.

    Returns None when an explicit chmod would be pointless (the current
    umask already yields the store directory's mode) or when the store
    directory cannot be stat'ed.
    """
    try:
        # files in .hg/ will be created using this mode
        mode = vfs.stat().st_mode
    except OSError:
        return None
    # avoid some useless chmods
    if (0o777 & ~util.umask) == (0o777 & mode):
        return None
    return mode
371 371
372 372
373 373 _data = [
374 374 b'bookmarks',
375 375 b'narrowspec',
376 376 b'data',
377 377 b'meta',
378 378 b'00manifest.d',
379 379 b'00manifest.i',
380 380 b'00changelog.d',
381 381 b'00changelog.i',
382 382 b'phaseroots',
383 383 b'obsstore',
384 384 b'requires',
385 385 ]
386 386
387 387 REVLOG_FILES_MAIN_EXT = (b'.i',)
388 388 REVLOG_FILES_OTHER_EXT = (
389 389 b'.idx',
390 390 b'.d',
391 391 b'.dat',
392 392 b'.n',
393 393 b'.nd',
394 394 b'.sda',
395 395 )
396 396 # file extension that also use a `-SOMELONGIDHASH.ext` form
397 397 REVLOG_FILES_LONG_EXT = (
398 398 b'.nd',
399 399 b'.idx',
400 400 b'.dat',
401 401 b'.sda',
402 402 )
403 403 # files that are "volatile" and might change between listing and streaming
404 404 #
405 405 # note: the ".nd" file are nodemap data and won't "change" but they might be
406 406 # deleted.
407 407 REVLOG_FILES_VOLATILE_EXT = (b'.n', b'.nd')
408 408
409 409 # some exception to the above matching
410 410 #
411 411 # XXX This is currently not in use because of issue6542
412 412 EXCLUDED = re.compile(br'.*undo\.[^/]+\.(nd?|i)$')
413 413
414 414
def is_revlog(f, kind, st):
    """Return the revlog type flags for directory entry `f`, or None.

    `kind` is the stat type of the entry; only regular files qualify.
    `st` is accepted but not consulted.
    """
    if kind != stat.S_IFREG:
        return None
    return revlog_type(f)
419 419
420 420
def revlog_type(f):
    """Classify file name `f` as a revlog component.

    Returns FILEFLAGS_REVLOG_MAIN for a main `.i` file, a
    FILETYPE_FILELOG_OTHER based flag set (possibly with
    FILEFLAGS_VOLATILE) for auxiliary revlog files, and None when the
    name is not revlog related.

    NOTE(review): the auxiliary branch returns FILETYPE_FILELOG_OTHER
    (which carries FILEFLAGS_FILELOG) regardless of the revlog kind —
    callers appear to mostly test the result against None; confirm
    before relying on the flag bits.
    """
    # XXX we need to filter `undo.` created by the transaction here, however
    # being naive about it also filter revlog for `undo.*` files, leading to
    # issue6542. So we no longer use EXCLUDED.
    if f.endswith(REVLOG_FILES_MAIN_EXT):
        return FILEFLAGS_REVLOG_MAIN
    if f.endswith(REVLOG_FILES_OTHER_EXT):
        flags = FILETYPE_FILELOG_OTHER
        if f.endswith(REVLOG_FILES_VOLATILE_EXT):
            # such files may disappear between listing and streaming
            flags |= FILEFLAGS_VOLATILE
        return flags
    return None
433 433
434 434
435 435 # the file is part of changelog data
436 436 FILEFLAGS_CHANGELOG = 1 << 13
437 437 # the file is part of manifest data
438 438 FILEFLAGS_MANIFESTLOG = 1 << 12
439 439 # the file is part of filelog data
440 440 FILEFLAGS_FILELOG = 1 << 11
441 441 # file that are not directly part of a revlog
442 442 FILEFLAGS_OTHER = 1 << 10
443 443
444 444 # the main entry point for a revlog
445 445 FILEFLAGS_REVLOG_MAIN = 1 << 1
446 446 # a secondary file for a revlog
447 447 FILEFLAGS_REVLOG_OTHER = 1 << 0
448 448
449 449 # files that are "volatile" and might change between listing and streaming
450 450 FILEFLAGS_VOLATILE = 1 << 20
451 451
452 452 FILETYPE_CHANGELOG_MAIN = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_MAIN
453 453 FILETYPE_CHANGELOG_OTHER = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_OTHER
454 454 FILETYPE_MANIFESTLOG_MAIN = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_MAIN
455 455 FILETYPE_MANIFESTLOG_OTHER = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_OTHER
456 456 FILETYPE_FILELOG_MAIN = FILEFLAGS_FILELOG | FILEFLAGS_REVLOG_MAIN
457 457 FILETYPE_FILELOG_OTHER = FILEFLAGS_FILELOG | FILEFLAGS_REVLOG_OTHER
458 458 FILETYPE_OTHER = FILEFLAGS_OTHER
459 459
460 460
@attr.s(slots=True, init=False)
class BaseStoreEntry:
    """An entry in the store

    This is returned by `store.walk` and represent some data in the store."""
466 466
467 467
@attr.s(slots=True, init=False)
class SimpleStoreEntry(BaseStoreEntry):
    """A generic entry in the store"""

    is_revlog = False

    # unencoded path of the file, relative to the store root
    _entry_path = attr.ib()
    # True when the file may change/disappear between listing and reading
    _is_volatile = attr.ib(default=False)
    # known size in bytes, or None when it has to be stat'ed on demand
    _file_size = attr.ib(default=None)

    def __init__(
        self,
        entry_path,
        is_volatile=False,
        file_size=None,
    ):
        super().__init__()
        self._entry_path = entry_path
        self._is_volatile = is_volatile
        self._file_size = file_size

    def files(self):
        """return the single `StoreFile` backing this entry"""
        return [
            StoreFile(
                unencoded_path=self._entry_path,
                file_size=self._file_size,
                is_volatile=self._is_volatile,
            )
        ]
497 497
498 498
@attr.s(slots=True, init=False)
class RevlogStoreEntry(BaseStoreEntry):
    """A revlog entry in the store"""

    is_revlog = True

    # one of the FILEFLAGS_* revlog category constants
    revlog_type = attr.ib(default=None)
    # what the revlog tracks: a file path for filelogs, a directory for
    # manifestlogs, b'' for top level revlogs (changelog, root manifest)
    target_id = attr.ib(default=None)
    # unencoded path prefix shared by all files of this revlog
    _path_prefix = attr.ib(default=None)
    # mapping of file extension -> StoreFile keyword arguments
    _details = attr.ib(default=None)

    def __init__(
        self,
        revlog_type,
        path_prefix,
        target_id,
        details,
    ):
        super().__init__()
        self.revlog_type = revlog_type
        self.target_id = target_id
        self._path_prefix = path_prefix
        # a revlog must at least have its main `.i` file
        assert b'.i' in details, (path_prefix, details)
        self._details = details

    @property
    def is_changelog(self):
        return self.revlog_type & FILEFLAGS_CHANGELOG

    @property
    def is_manifestlog(self):
        return self.revlog_type & FILEFLAGS_MANIFESTLOG

    @property
    def is_filelog(self):
        return self.revlog_type & FILEFLAGS_FILELOG

    def main_file_path(self):
        """unencoded path of the main revlog file"""
        return self._path_prefix + b'.i'

    def files(self):
        """return one `StoreFile` per known file of this revlog

        Sorted with `_ext_key` so the main `.i` file comes last."""
        files = []
        for ext in sorted(self._details, key=_ext_key):
            path = self._path_prefix + ext
            data = self._details[ext]
            files.append(StoreFile(unencoded_path=path, **data))
        return files
547 547
548 548
@attr.s(slots=True)
class StoreFile:
    """a file matching an entry"""

    # unencoded path of the file, relative to the store root
    unencoded_path = attr.ib()
    # cached size in bytes, or None when unknown
    _file_size = attr.ib(default=None)
    # True when the file may change/disappear between listing and reading
    is_volatile = attr.ib(default=False)

    def file_size(self, vfs):
        """return the file size, stat-ing through `vfs` when not cached

        A missing file is reported as size 0."""
        if self._file_size is not None:
            return self._file_size
        try:
            return vfs.stat(self.unencoded_path).st_size
        except FileNotFoundError:
            return 0
564 564
565 565
def _gather_revlog(files_data):
    """group files per revlog prefix

    Takes an iterable of (unencoded_name, value) pairs and returns a
    sorted list of (prefix, {suffix: value}) items, one per revlog: the
    prefix is the file name without its variable extension, and the
    inner mapping records every extension seen for that revlog.
    """
    grouped = collections.defaultdict(dict)
    for unencoded, value in files_data:
        prefix, suffix = _split_revlog_ext(unencoded)
        grouped[prefix][suffix] = value
    return sorted(grouped.items())
578 578
579 579
def _split_revlog_ext(filename):
    """split the revlog file prefix from the variable extension"""
    # files with a long-id hash use `-` before the suffix, others use `.`
    if filename.endswith(REVLOG_FILES_LONG_EXT):
        sep = b'-'
    else:
        sep = b'.'
    cut = filename.rfind(sep)
    return filename[:cut], filename[cut:]
588 588
589 589
590 590 def _ext_key(ext):
591 591 """a key to order revlog suffix
592 592
593 593 important to issue .i after other entry."""
594 594 # the only important part of this order is to keep the `.i` last.
595 595 if ext.endswith(b'.n'):
596 596 return (0, ext)
597 597 elif ext.endswith(b'.nd'):
598 598 return (10, ext)
599 599 elif ext.endswith(b'.d'):
600 600 return (20, ext)
601 601 elif ext.endswith(b'.i'):
602 602 return (50, ext)
603 603 else:
604 604 return (40, ext)
605 605
606 606
class basicstore:
    '''base class for local repository stores'''

    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
        # mode new store files should be created with (None: leave default)
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        # direct, unencoded access to the store directory
        self.rawvfs = vfs
        # encoded access: directory names are protected with `encodedir`
        self.vfs = vfsmod.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        """return the file system path of `f`, encoded for this store"""
        return self.path + b'/' + encodedir(f)

    def _walk(self, relpath, recurse, undecodable=None):
        '''return a sorted list of (unencoded_name, (revlog_type, size))

        Only revlog-related regular files are reported. `undecodable` is
        unused here; it is part of the signature for subclasses.'''
        path = self.path
        if relpath:
            path += b'/' + relpath
        striplen = len(self.path) + 1
        l = []
        if self.rawvfs.isdir(path):
            visit = [path]
            readdir = self.rawvfs.readdir
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + b'/' + f
                    rl_type = is_revlog(f, kind, st)
                    if rl_type is not None:
                        n = util.pconvert(fp[striplen:])
                        l.append((decodedir(n), (rl_type, st.st_size)))
                    elif kind == stat.S_IFDIR and recurse:
                        visit.append(fp)

        l.sort()
        return l

    def changelog(self, trypending, concurrencychecker=None):
        """instantiate the repository changelog from this store"""
        return changelog.changelog(
            self.vfs,
            trypending=trypending,
            concurrencychecker=concurrencychecker,
        )

    def manifestlog(self, repo, storenarrowmatch):
        """instantiate the manifest log from this store"""
        rootstore = manifest.manifestrevlog(repo.nodeconstants, self.vfs)
        return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch)

    def data_entries(
        self, matcher=None, undecodable=None
    ) -> Generator[BaseStoreEntry, None, None]:
        """Like walk, but excluding the changelog and root manifest.

        When [undecodable] is None, revlogs names that can't be
        decoded cause an exception. When it is provided, it should
        be a list and the filenames that can't be decoded are added
        to it instead. This is very rarely needed."""
        dirs = [
            (b'data', FILEFLAGS_FILELOG),
            (b'meta', FILEFLAGS_MANIFESTLOG),
        ]
        for base_dir, rl_type in dirs:
            files = self._walk(base_dir, True, undecodable=undecodable)
            # skip entries `_walk` could not classify as revlog files
            files = (f for f in files if f[1][0] is not None)
            for revlog, details in _gather_revlog(files):
                file_details = {}
                # drop the `data/` or `meta/` prefix to obtain the target id
                revlog_target_id = revlog.split(b'/', 1)[1]
                for ext, (t, s) in sorted(details.items()):
                    file_details[ext] = {
                        'is_volatile': bool(t & FILEFLAGS_VOLATILE),
                        'file_size': s,
                    }
                yield RevlogStoreEntry(
                    path_prefix=revlog,
                    revlog_type=rl_type,
                    target_id=revlog_target_id,
                    details=file_details,
                )

    def top_entries(self, phase=False) -> Generator[BaseStoreEntry, None, None]:
        """yield store entries living at the root of the store

        When `phase` is True the phaseroots file is yielded first, before
        any revlog content."""
        if phase and self.vfs.exists(b'phaseroots'):
            yield SimpleStoreEntry(
                entry_path=b'phaseroots',
                is_volatile=True,
            )

        files = reversed(self._walk(b'', False))

        changelogs = collections.defaultdict(dict)
        manifestlogs = collections.defaultdict(dict)

        for u, (t, s) in files:
            if u.startswith(b'00changelog'):
                name, ext = _split_revlog_ext(u)
                changelogs[name][ext] = (t, s)
            elif u.startswith(b'00manifest'):
                name, ext = _split_revlog_ext(u)
                manifestlogs[name][ext] = (t, s)
            else:
                yield SimpleStoreEntry(
                    entry_path=u,
                    is_volatile=bool(t & FILEFLAGS_VOLATILE),
                    file_size=s,
                )
        # yield manifest before changelog
        top_rl = [
            (manifestlogs, FILEFLAGS_MANIFESTLOG),
            (changelogs, FILEFLAGS_CHANGELOG),
        ]
        assert len(manifestlogs) <= 1
        assert len(changelogs) <= 1
        for data, revlog_type in top_rl:
            for revlog, details in sorted(data.items()):
                file_details = {}
                for ext, (t, s) in details.items():
                    file_details[ext] = {
                        'is_volatile': bool(t & FILEFLAGS_VOLATILE),
                        'file_size': s,
                    }
                yield RevlogStoreEntry(
                    path_prefix=revlog,
                    revlog_type=revlog_type,
                    target_id=b'',
                    details=file_details,
                )

    def walk(
        self, matcher=None, phase=False
    ) -> Generator[BaseStoreEntry, None, None]:
        """return files related to data storage (ie: revlogs)

        yields instance from BaseStoreEntry subclasses

        if a matcher is passed, storage files of only those tracked paths
        are passed with matches the matcher
        """
        # yield data files first
        for x in self.data_entries(matcher):
            yield x
        for x in self.top_entries(phase=phase):
            yield x

    def copylist(self):
        """list of store files to copy for a local clone"""
        return _data

    def write(self, tr):
        # the basic store has nothing to flush
        pass

    def invalidatecaches(self):
        # the basic store keeps no caches
        pass

    def markremoved(self, fn):
        # the basic store does not track file names
        pass

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = b"/".join((b"data", path))
        # file?
        if self.vfs.exists(path + b".i"):
            return True
        # dir?
        if not path.endswith(b"/"):
            path = path + b"/"
        return self.vfs.exists(path)
772 773
773 774
class encodedstore(basicstore):
    """store writing file names through the reversible `encodefilename`"""

    def __init__(self, path, vfstype):
        vfs = vfstype(path + b'/store')
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        # encoded access: full reversible filename encoding
        self.vfs = vfsmod.filtervfs(vfs, encodefilename)
        self.opener = self.vfs

    def _walk(self, relpath, recurse, undecodable=None):
        """same as basicstore._walk, with on-disk names decoded

        A name that cannot be decoded raises a StorageError, or is
        appended to `undecodable` when that list is provided."""
        old = super()._walk(relpath, recurse)
        new = []
        for f1, value in old:
            try:
                f2 = decodefilename(f1)
            except KeyError:
                if undecodable is None:
                    msg = _(b'undecodable revlog name %s') % f1
                    raise error.StorageError(msg)
                else:
                    undecodable.append(f1)
                    continue
            new.append((f2, value))
        return new

    def data_entries(
        self, matcher=None, undecodable=None
    ) -> Generator[BaseStoreEntry, None, None]:
        """same as basicstore.data_entries, filtered through `matcher`"""
        entries = super(encodedstore, self).data_entries(
            undecodable=undecodable
        )
        for entry in entries:
            if _match_tracked_entry(entry, matcher):
                yield entry

    def join(self, f):
        """return the file system path of `f`, encoded for this store"""
        return self.path + b'/' + encodefilename(f)

    def copylist(self):
        """list of store files to copy for a local clone"""
        return [b'requires', b'00changelog.i'] + [b'store/' + f for f in _data]
815 816
816 817
class fncache:
    """in-memory view of the store's `fncache` file (list of tracked names)

    The file contains one unencoded store file name per line. The
    filenames used to be partially encoded, hence the encodedir/decodedir
    dance when reading and writing.
    """

    def __init__(self, vfs):
        self.vfs = vfs
        # names that `add` must silently skip
        self._ignores = set()
        # set of known entries, or None when the file was not loaded yet
        self.entries = None
        # True when `entries` diverged from the on-disk content
        self._dirty = False
        # set of new additions to fncache
        self.addls = set()

    def ensureloaded(self, warn=None):
        """read the fncache file if not already read.

        If the file on disk is corrupted, raise. If warn is provided,
        warn and keep going instead."""
        if self.entries is None:
            self._load(warn)

    def _load(self, warn=None):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.vfs(b'fncache', mode=b'rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return

        self.entries = set()
        chunk = b''
        # read in bounded chunks to avoid loading a huge fncache file in
        # one gulp; entries are newline separated
        for c in iter(functools.partial(fp.read, fncache_chunksize), b''):
            chunk += c
            try:
                p = chunk.rindex(b'\n')
                self.entries.update(decodedir(chunk[: p + 1]).splitlines())
                chunk = chunk[p + 1 :]
            except ValueError:
                # substring '\n' not found, maybe the entry is bigger than the
                # chunksize, so let's keep iterating
                pass

        if chunk:
            # grammar fixed: "does not ends" -> "does not end"
            msg = _(b"fncache does not end with a newline")
            if warn:
                warn(msg + b'\n')
            else:
                raise error.Abort(
                    msg,
                    hint=_(
                        b"use 'hg debugrebuildfncache' to "
                        b"rebuild the fncache"
                    ),
                )
        self._checkentries(fp, warn)
        fp.close()

    def _checkentries(self, fp, warn):
        """make sure there is no empty string in entries"""
        if b'' in self.entries:
            fp.seek(0)
            for n, line in enumerate(fp):
                if not line.rstrip(b'\n'):
                    t = _(b'invalid entry in fncache, line %d') % (n + 1)
                    if warn:
                        warn(t + b'\n')
                    else:
                        raise error.Abort(t)

    def write(self, tr):
        """flush pending changes to the fncache file in transaction `tr`"""
        if self._dirty:
            assert self.entries is not None
            self.entries = self.entries | self.addls
            self.addls = set()
            tr.addbackup(b'fncache')
            fp = self.vfs(b'fncache', mode=b'wb', atomictemp=True)
            if self.entries:
                fp.write(encodedir(b'\n'.join(self.entries) + b'\n'))
            fp.close()
            self._dirty = False
        if self.addls:
            # if we have just new entries, let's append them to the fncache
            tr.addbackup(b'fncache')
            fp = self.vfs(b'fncache', mode=b'ab', atomictemp=True)
            if self.addls:
                fp.write(encodedir(b'\n'.join(self.addls) + b'\n'))
            fp.close()
            self.entries = None
            self.addls = set()

    def addignore(self, fn):
        """mark `fn` so later `add` calls skip it"""
        self._ignores.add(fn)

    def add(self, fn):
        """record `fn` as part of the store (unless ignored)"""
        if fn in self._ignores:
            return
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self.addls.add(fn)

    def remove(self, fn):
        """forget `fn`; unknown names are silently ignored"""
        if self.entries is None:
            self._load()
        if fn in self.addls:
            self.addls.remove(fn)
            return
        try:
            self.entries.remove(fn)
            self._dirty = True
        except KeyError:
            pass

    def __contains__(self, fn):
        if fn in self.addls:
            return True
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries | self.addls)
941 942
942 943
class _fncachevfs(vfsmod.proxyvfs):
    """vfs proxy recording newly written store file names into the fncache"""

    def __init__(self, vfs, fnc, encode):
        vfsmod.proxyvfs.__init__(self, vfs)
        self.fncache = fnc
        self.encode = encode

    def __call__(self, path, mode=b'r', *args, **kw):
        # register revlog files under data/ or meta/ opened for writing
        encoded = self.encode(path)
        if (
            mode not in (b'r', b'rb')
            and (path.startswith(b'data/') or path.startswith(b'meta/'))
            and revlog_type(path) is not None
        ):
            # do not trigger a fncache load when adding a file that already is
            # known to exist.
            notload = self.fncache.entries is None and self.vfs.exists(encoded)
            if notload and b'r+' in mode and not self.vfs.stat(encoded).st_size:
                # when appending to an existing file, if the file has size zero,
                # it should be considered as missing. Such zero-size files are
                # the result of truncation when a transaction is aborted.
                notload = False
            if not notload:
                self.fncache.add(path)
        return self.vfs(encoded, mode, *args, **kw)

    def join(self, path):
        """return the encoded absolute path for `path` (or the vfs root)"""
        if path:
            return self.vfs.join(self.encode(path))
        else:
            return self.vfs.join(path)

    def register_file(self, path):
        """generic hook point to lets fncache steer its stew"""
        if path.startswith(b'data/') or path.startswith(b'meta/'):
            self.fncache.add(path)
978 979
979 980
class fncachestore(basicstore):
    """store relying on the fncache file to enumerate its content"""

    def __init__(self, path, vfstype, dotencode):
        # `dotencode` selects the variant that also escapes leading
        # periods/spaces in path components
        if dotencode:
            encode = _pathencode
        else:
            encode = _plainhybridencode
        self.encode = encode
        vfs = vfstype(path + b'/store')
        self.path = vfs.base
        self.pathsep = self.path + b'/'
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        fnc = fncache(vfs)
        self.fncache = fnc
        # writes through this vfs register new files into the fncache
        self.vfs = _fncachevfs(vfs, fnc, encode)
        self.opener = self.vfs

    def join(self, f):
        """return the encoded file system path for `f`"""
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        """size in bytes of the (already encoded) `path`"""
        return self.rawvfs.stat(path).st_size

    def data_entries(
        self, matcher=None, undecodable=None
    ) -> Generator[BaseStoreEntry, None, None]:
        """yield RevlogStoreEntry for filelogs and manifestlogs, based on
        the fncache content instead of a directory walk"""
        files = ((f, revlog_type(f)) for f in self.fncache)
        # Note: all files in fncache should be revlog related, However the
        # fncache might contains such file added by previous version of
        # Mercurial.
        files = (f for f in files if f[1] is not None)
        by_revlog = _gather_revlog(files)
        for revlog, details in by_revlog:
            file_details = {}
            if revlog.startswith(b'data/'):
                rl_type = FILEFLAGS_FILELOG
                revlog_target_id = revlog.split(b'/', 1)[1]
            elif revlog.startswith(b'meta/'):
                rl_type = FILEFLAGS_MANIFESTLOG
                # drop the initial directory and the `00manifest` file part
                tmp = revlog.split(b'/', 1)[1]
                revlog_target_id = tmp.rsplit(b'/', 1)[0] + b'/'
            else:
                # unreachable
                assert False, revlog
            for ext, t in details.items():
                file_details[ext] = {
                    'is_volatile': bool(t & FILEFLAGS_VOLATILE),
                }
            entry = RevlogStoreEntry(
                path_prefix=revlog,
                revlog_type=rl_type,
                target_id=revlog_target_id,
                details=file_details,
            )
            if _match_tracked_entry(entry, matcher):
                yield entry

    def copylist(self):
        """list of store files to copy for a local clone"""
        d = (
            b'bookmarks',
            b'narrowspec',
            b'data',
            b'meta',
            b'dh',
            b'fncache',
            b'phaseroots',
            b'obsstore',
            b'00manifest.d',
            b'00manifest.i',
            b'00changelog.d',
            b'00changelog.i',
            b'requires',
        )
        return [b'requires', b'00changelog.i'] + [b'store/' + f for f in d]

    def write(self, tr):
        # flush pending fncache changes within the transaction
        self.fncache.write(tr)

    def invalidatecaches(self):
        # drop the in-memory fncache state; reloaded lazily on next use
        self.fncache.entries = None
        self.fncache.addls = set()

    def markremoved(self, fn):
        self.fncache.remove(fn)

    def _exists(self, f):
        # check actual on-disk presence through the raw vfs
        ef = self.encode(f)
        try:
            self.getsize(ef)
            return True
        except FileNotFoundError:
            return False

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = b"/".join((b"data", path))
        # check for files (exact match)
        e = path + b'.i'
        if e in self.fncache and self._exists(e):
            return True
        # now check for directories (prefix match)
        if not path.endswith(b'/'):
            path += b'/'
        for e in self.fncache:
            if e.startswith(path) and self._exists(e):
                return True
        return False
@@ -1,181 +1,181 b''
1 1 #require no-reposimplestore
2 2
3 3 Test creating and consuming a stream bundle v2
4 4
5 5 $ getmainid() {
6 6 > hg -R main log --template '{node}\n' --rev "$1"
7 7 > }
8 8
9 9 $ cp $HGRCPATH $TESTTMP/hgrc.orig
10 10
11 11 $ cat >> $HGRCPATH << EOF
12 12 > [experimental]
13 13 > evolution.createmarkers=True
14 14 > evolution.exchange=True
15 15 > bundle2-output-capture=True
16 16 > [ui]
17 17 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
18 18 > [web]
19 19 > push_ssl = false
20 20 > allow_push = *
21 21 > [phases]
22 22 > publish=False
23 23 > [extensions]
24 24 > drawdag=$TESTDIR/drawdag.py
25 25 > clonebundles=
26 26 > EOF
27 27
28 28 The extension requires a repo (currently unused)
29 29
30 30 $ hg init main
31 31 $ cd main
32 32
33 33 $ hg debugdrawdag <<'EOF'
34 34 > E
35 35 > |
36 36 > D
37 37 > |
38 38 > C
39 39 > |
40 40 > B
41 41 > |
42 42 > A
43 43 > EOF
44 44
45 45 $ hg bundle -a --type="none-v2;stream=v2" bundle.hg
46 46 $ hg debugbundle bundle.hg
47 47 Stream params: {}
48 48 stream2 -- {bytecount: 1693, filecount: 11, requirements: generaldelta%2Crevlogv1%2Csparserevlog} (mandatory: True) (no-zstd !)
49 49 stream2 -- {bytecount: 1693, filecount: 11, requirements: generaldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog} (mandatory: True) (zstd no-rust !)
50 50 stream2 -- {bytecount: 1693, filecount: 11, requirements: generaldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog} (mandatory: True) (rust !)
51 51 $ hg debugbundle --spec bundle.hg
52 52 none-v2;stream=v2;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog (no-zstd !)
53 53 none-v2;stream=v2;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog (zstd no-rust !)
54 54 none-v2;stream=v2;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog (rust !)
55 55
56 56 Test that we can apply the bundle as a stream clone bundle
57 57
58 58 $ cat > .hg/clonebundles.manifest << EOF
59 59 > http://localhost:$HGPORT1/bundle.hg BUNDLESPEC=`hg debugbundle --spec bundle.hg`
60 60 > EOF
61 61
62 62 $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
63 63 $ cat hg.pid >> $DAEMON_PIDS
64 64
65 65 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
66 66 $ cat http.pid >> $DAEMON_PIDS
67 67
68 68 $ cd ..
69 69 $ hg clone http://localhost:$HGPORT streamv2-clone-implicit --debug
70 70 using http://localhost:$HGPORT/
71 71 sending capabilities command
72 72 sending clonebundles command
73 73 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
74 74 bundle2-input-bundle: with-transaction
75 75 bundle2-input-part: "stream2" (params: 3 mandatory) supported
76 76 applying stream bundle
77 77 11 files to transfer, 1.65 KB of data
78 78 starting 4 threads for background file closing (?)
79 79 starting 4 threads for background file closing (?)
80 80 adding [s] data/A.i (66 bytes)
81 81 adding [s] data/B.i (66 bytes)
82 82 adding [s] data/C.i (66 bytes)
83 83 adding [s] data/D.i (66 bytes)
84 84 adding [s] data/E.i (66 bytes)
85 adding [s] phaseroots (43 bytes)
85 86 adding [s] 00manifest.i (584 bytes)
86 87 adding [s] 00changelog.i (595 bytes)
87 adding [s] phaseroots (43 bytes)
88 88 adding [c] branch2-served (94 bytes)
89 89 adding [c] rbc-names-v1 (7 bytes)
90 90 adding [c] rbc-revs-v1 (40 bytes)
91 91 transferred 1.65 KB in * seconds (* */sec) (glob)
92 92 bundle2-input-part: total payload size 1840
93 93 bundle2-input-bundle: 1 parts total
94 94 updating the branch cache
95 95 finished applying clone bundle
96 96 query 1; heads
97 97 sending batch command
98 98 searching for changes
99 99 all remote heads known locally
100 100 no changes found
101 101 sending getbundle command
102 102 bundle2-input-bundle: with-transaction
103 103 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
104 104 bundle2-input-part: "phase-heads" supported
105 105 bundle2-input-part: total payload size 24
106 106 bundle2-input-bundle: 2 parts total
107 107 checking for updated bookmarks
108 108 updating to branch default
109 109 resolving manifests
110 110 branchmerge: False, force: False, partial: False
111 111 ancestor: 000000000000, local: 000000000000+, remote: 9bc730a19041
112 112 A: remote created -> g
113 113 getting A
114 114 B: remote created -> g
115 115 getting B
116 116 C: remote created -> g
117 117 getting C
118 118 D: remote created -> g
119 119 getting D
120 120 E: remote created -> g
121 121 getting E
122 122 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
123 123 updating the branch cache
124 124 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
125 125
126 126 $ hg clone --stream http://localhost:$HGPORT streamv2-clone-explicit --debug
127 127 using http://localhost:$HGPORT/
128 128 sending capabilities command
129 129 sending clonebundles command
130 130 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
131 131 bundle2-input-bundle: with-transaction
132 132 bundle2-input-part: "stream2" (params: 3 mandatory) supported
133 133 applying stream bundle
134 134 11 files to transfer, 1.65 KB of data
135 135 starting 4 threads for background file closing (?)
136 136 starting 4 threads for background file closing (?)
137 137 adding [s] data/A.i (66 bytes)
138 138 adding [s] data/B.i (66 bytes)
139 139 adding [s] data/C.i (66 bytes)
140 140 adding [s] data/D.i (66 bytes)
141 141 adding [s] data/E.i (66 bytes)
142 adding [s] phaseroots (43 bytes)
142 143 adding [s] 00manifest.i (584 bytes)
143 144 adding [s] 00changelog.i (595 bytes)
144 adding [s] phaseroots (43 bytes)
145 145 adding [c] branch2-served (94 bytes)
146 146 adding [c] rbc-names-v1 (7 bytes)
147 147 adding [c] rbc-revs-v1 (40 bytes)
148 148 transferred 1.65 KB in * seconds (* */sec) (glob)
149 149 bundle2-input-part: total payload size 1840
150 150 bundle2-input-bundle: 1 parts total
151 151 updating the branch cache
152 152 finished applying clone bundle
153 153 query 1; heads
154 154 sending batch command
155 155 searching for changes
156 156 all remote heads known locally
157 157 no changes found
158 158 sending getbundle command
159 159 bundle2-input-bundle: with-transaction
160 160 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
161 161 bundle2-input-part: "phase-heads" supported
162 162 bundle2-input-part: total payload size 24
163 163 bundle2-input-bundle: 2 parts total
164 164 checking for updated bookmarks
165 165 updating to branch default
166 166 resolving manifests
167 167 branchmerge: False, force: False, partial: False
168 168 ancestor: 000000000000, local: 000000000000+, remote: 9bc730a19041
169 169 A: remote created -> g
170 170 getting A
171 171 B: remote created -> g
172 172 getting B
173 173 C: remote created -> g
174 174 getting C
175 175 D: remote created -> g
176 176 getting D
177 177 E: remote created -> g
178 178 getting E
179 179 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
180 180 updating the branch cache
181 181 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
General Comments 0
You need to be logged in to leave comments. Login now