##// END OF EJS Templates
errors: stop passing non-strings to Abort's constructor...
Martin von Zweigbergk -
r46273:a736ab68 default
parent child Browse files
Show More
@@ -1,1070 +1,1070 b''
1 1 # Mercurial built-in replacement for cvsps.
2 2 #
3 3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from __future__ import absolute_import
8 8
9 9 import functools
10 10 import os
11 11 import re
12 12
13 13 from mercurial.i18n import _
14 14 from mercurial.pycompat import open
15 15 from mercurial import (
16 16 encoding,
17 17 error,
18 18 hook,
19 19 pycompat,
20 20 util,
21 21 )
22 22 from mercurial.utils import (
23 23 dateutil,
24 24 procutil,
25 25 stringutil,
26 26 )
27 27
28 28 pickle = util.pickle
29 29
30 30
class logentry(object):
    '''A single CVS file revision parsed from (r)log output.

    Attributes (all set by the parser via keyword arguments):
    .author - author name as CVS knows it
    .branch - name of branch this revision is on
    .branches - revision tuple of branches starting at this revision
    .comment - commit message
    .commitid - CVS commitid or None
    .date - the commit date as a (time, tz) tuple
    .dead - true if file revision is dead
    .file - Name of file
    .lines - a tuple (+lines, -lines) or None
    .parent - Previous revision of this entry
    .rcs - name of file as returned from CVS
    .revision - revision number as tuple
    .tags - list of tags on the file
    .synthetic - is this a synthetic "file ... added on ..." revision?
    .mergepoint - the branch that has been merged from (if present in
    rlog output) or None
    .branchpoints - the branches that start at the current entry or empty
    '''

    def __init__(self, **entries):
        # Only .synthetic has a default; everything else comes from the
        # caller's keyword arguments.
        self.synthetic = False
        self.__dict__.update(entries)

    def __repr__(self):
        fields = ", ".join(
            "%s=%r" % (name, value)
            for name, value in sorted(self.__dict__.items())
        )
        return "%s(%s)" % (type(self).__name__, fields)
59 59
60 60
class logerror(Exception):
    """Raised when CVS (r)log output reports an error (aborted log, server
    error, inaccessible CVSROOT) or the directory is not a CVS sandbox."""

    pass
63 63
64 64
def getrepopath(cvspath):
    """Return the repository path from a CVS path.

    >>> getrepopath(b'/foo/bar')
    '/foo/bar'
    >>> getrepopath(b'c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:10/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:10c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b'user@server/path/to/repository')
    '/path/to/repository'
    """
    # According to the CVS manual, CVS paths are expressed like:
    #   [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
    #
    # The repository path is everything from the first '/' that follows
    # the '@' sign (if any) in the last ':'-separated component.
    tail = cvspath.split(b':')[-1]
    at = tail.find(b'@')
    start = at if at != -1 else 0
    return tail[tail.find(b'/', start) :]
103 103
104 104
def createlog(ui, directory=None, root=b"", rlog=True, cache=None):
    '''Collect the CVS rlog.

    Runs ``cvs rlog`` (or ``cvs log`` when rlog is False) against
    ``directory`` — defaulting to the current CVS sandbox — and parses the
    output with a state machine into a list of logentry objects.

    When ``cache`` is truthy, previously parsed entries are reloaded from a
    pickle under ~/.hg.cvsps (for cache == b'update') and the merged result
    is written back there.

    Raises logerror when the CVS output reports an error or when no
    directory is given and the cwd is not a CVS sandbox.
    '''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}

    def scache(s):
        """return a shared version of a string"""
        return _scache.setdefault(s, s)

    ui.status(_(b'collecting CVS rlog\n'))

    log = []  # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile(b'RCS file: (.+)$')
    re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
    re_03 = re.compile(
        b"(Cannot access.+CVSROOT)|(can't create temporary directory.+)$"
    )
    re_10 = re.compile(b'Working file: (.+)$')
    re_20 = re.compile(b'symbolic names:')
    re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
    re_31 = re.compile(b'----------------------------$')
    re_32 = re.compile(
        b'======================================='
        b'======================================$'
    )
    re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(
        br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
        br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
        br'(\s+commitid:\s+([^;]+);)?'
        br'(.*mergepoint:\s+([^;]+);)?'
    )
    re_70 = re.compile(b'branches: (.+);$')

    file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')

    prefix = b''  # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            with open(os.path.join(b'CVS', b'Repository'), b'rb') as f:
                prefix = f.read().strip()
            directory = prefix
            if prefix == b".":
                prefix = b""
        except IOError:
            raise logerror(_(b'not a CVS sandbox'))

        if prefix and not prefix.endswith(pycompat.ossep):
            prefix += pycompat.ossep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join(b'CVS', b'Root'), b'rb').read().strip()
        except IOError:
            pass

    if not root:
        root = encoding.environ.get(b'CVSROOT', b'')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser(b'~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumeric characters, concatenated in a way that does not
        # mix up the various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(b":") + [directory, b"cache"]
        cachefile = [b'-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(
            cachedir, b'.'.join([s for s in cachefile if s])
        )

        if cache == b'update':
            try:
                ui.note(_(b'reading cvs log cache %s\n') % cachefile)
                # NOTE(review): pickle.load on a file under the user's home
                # directory — assumes the cache file is trusted.
                oldlog = pickle.load(open(cachefile, b'rb'))
                for e in oldlog:
                    # Discard caches written by older versions that lack
                    # attributes the code below relies on.
                    if not (
                        util.safehasattr(e, b'branchpoints')
                        and util.safehasattr(e, b'commitid')
                        and util.safehasattr(e, b'mergepoint')
                    ):
                        ui.status(_(b'ignoring old cache\n'))
                        oldlog = []
                        break

                ui.note(_(b'cache has %d log entries\n') % len(oldlog))
            except Exception as e:
                ui.note(_(b'error reading cache: %r\n') % e)

            if oldlog:
                date = oldlog[-1].date  # last commit date as a (time,tz) tuple
                date = dateutil.datestr(date, b'%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = [b'cvs', b'-q']
    if root:
        cmd.append(b'-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith(b'/'):
            p += b'/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append([b'log', b'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append(b'-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    # States used below:
    #   0 = waiting for 'RCS file' header
    #   1 = waiting for 'Working file' (only for 'log', not 'rlog')
    #   2 = waiting for 'symbolic names:'
    #   3 = reading symbolic names (tags/branches)
    #   4 = waiting for the '----' separator before the first revision
    #   5 = reading a 'revision N.N' line (creates the logentry)
    #   6 = reading the date/author/state line
    #   7, 8 = accumulating the commit message
    tags = {}  # dictionary of revisions on current file with their tags
    branchmap = {}  # mapping between branch names and revision numbers
    rcsmap = {}
    state = 0
    store = False  # set when a new record can be appended

    cmd = [procutil.shellquote(arg) for arg in cmd]
    ui.note(_(b"running %s\n") % (b' '.join(cmd)))
    ui.debug(b"prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = procutil.popen(b' '.join(cmd), b'rb')
    # One line of lookahead: states 7/8 need to peek at the next line to
    # decide whether a '----' separator ends the message or is part of it.
    peek = util.fromnativeeol(pfp.readline())
    while True:
        line = peek
        if line == b'':
            break
        peek = util.fromnativeeol(pfp.readline())
        if line.endswith(b'\n'):
            line = line[:-1]
        # ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    # Strip the ',v' suffix, the sandbox prefix and any
                    # 'Attic/' component to recover the working file name.
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix) :]
                    if filename.startswith(b'/'):
                        filename = filename[1:]
                    if filename.startswith(b'Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace(b'/Attic/', b'/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _(b'RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split(b'.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _(
                    b'must have at least some revisions'
                )

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _(b'expected revision number')
            e = logentry(
                rcs=scache(rcs),
                file=scache(filename),
                revision=tuple([int(x) for x in match.group(1).split(b'.')]),
                branches=[],
                parent=None,
                commitid=None,
                mergepoint=None,
                branchpoints=set(),
            )

            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _(b'revision must be followed by date line')
            d = match.group(1)
            if d[2] == b'/':
                # Y2K
                d = b'19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + b' UTC'
            e.date = dateutil.parsedate(
                d,
                [
                    b'%y/%m/%d %H:%M:%S',
                    b'%Y/%m/%d %H:%M:%S',
                    b'%Y-%m-%d %H:%M:%S',
                ],
            )
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == b'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7):  # cvs 1.12 commitid
                e.commitid = match.group(8)

            if match.group(9):  # cvsnt mergepoint
                myrev = match.group(10).split(b'.')
                if len(myrev) == 2:  # head
                    e.mergepoint = b'HEAD'
                else:
                    # Map the revision back to the branch tag that owns it.
                    myrev = b'.'.join(myrev[:-2] + [b'0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    assert len(branches) == 1, (
                        b'unknown branch: %s' % e.mergepoint
                    )
                    e.mergepoint = branches[0]

            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [
                    tuple([int(y) for y in x.strip().split(b'.')])
                    for x in m.group(1).split(b';')
                ]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                # '----' only ends the message if the next line starts a
                # new revision; otherwise it belongs to the message itself.
                cpeek = peek
                if cpeek.endswith(b'\n'):
                    cpeek = cpeek[:-1]
                if re_50.match(cpeek):
                    state = 5
                    store = True
                else:
                    e.comment.append(line)
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (
            store
            and e.dead
            and e.revision[-1] == 1
            and len(e.comment) == 1  # 1.1 or 1.1.x.1
            and file_added_re.match(e.comment[0])
        ):
            ui.debug(
                b'found synthetic revision in %s: %r\n' % (e.rcs, e.comment[0])
            )
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache(b'\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in pycompat.iteritems(branchmap):
                revparts = tuple([int(i) for i in revision.split(b'.')])
                if len(revparts) < 2:  # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1):  # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            rcsmap[e.rcs.replace(b'/Attic/', b'/')] = e.rcs

            if len(log) % 100 == 0:
                ui.status(
                    stringutil.ellipsis(b'%d %s' % (len(log), e.file), 80)
                    + b'\n'
                )

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
        rcs = e.rcs.replace(b'/Attic/', b'/')
        if rcs in rcsmap:
            e.rcs = rcsmap[rcs]
        branch = e.revision[:-1]
        versions[(e.rcs, branch)] = e.revision

    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(
                    _(
                        b'log cache overlaps with new log entries,'
                        b' re-run without cache.'
                    )
                )

            log = oldlog + log

            # write the new cachefile
            ui.note(_(b'writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, b'wb'))
        else:
            log = oldlog

    ui.status(_(b'%d log entries\n') % len(log))

    encodings = ui.configlist(b'convert', b'cvsps.logencoding')
    if encodings:

        def revstr(r):
            # this is needed, because logentry.revision is a tuple of "int"
            # (e.g. (1, 2) for "1.2")
            return b'.'.join(pycompat.maplist(pycompat.bytestr, r))

        # Try each configured encoding in order until one can decode the
        # message; re-encode everything to UTF-8.
        for entry in log:
            comment = entry.comment
            for e in encodings:
                try:
                    entry.comment = comment.decode(pycompat.sysstr(e)).encode(
                        'utf-8'
                    )
                    if ui.debugflag:
                        ui.debug(
                            b"transcoding by %s: %s of %s\n"
                            % (e, revstr(entry.revision), entry.file)
                        )
                    break
                except UnicodeDecodeError:
                    pass  # try next encoding
                except LookupError as inst:  # unknown encoding, maybe
                    raise error.Abort(
                        pycompat.bytestr(inst),
                        hint=_(
                            b'check convert.cvsps.logencoding configuration'
                        ),
                    )
            else:
                raise error.Abort(
                    _(
                        b"no encoding can transcode"
                        b" CVS log message for %s of %s"
                    )
                    % (revstr(entry.revision), entry.file),
                    hint=_(b'check convert.cvsps.logencoding configuration'),
                )

    hook.hook(ui, None, b"cvslog", True, log=log)

    return log
580 580
581 581
class changeset(object):
    '''Class changeset has the following attributes:
    .id - integer identifying this changeset (list index)
    .author - author name as CVS knows it
    .branch - name of branch this changeset is on, or None
    .comment - commit message
    .commitid - CVS commitid or None
    .date - the commit date as a (time,tz) tuple
    .entries - list of logentry objects in this changeset
    .parents - list of one or two parent changesets
    .tags - list of tags on this changeset
    .synthetic - from synthetic revision "file ... added on branch ..."
    .mergepoint- the branch that has been merged from or None
    .branchpoints- the branches that start at the current entry or empty
    '''

    def __init__(self, **entries):
        # Only .id and .synthetic have defaults; the rest are supplied
        # by createchangeset() via keyword arguments.
        self.id = None
        self.synthetic = False
        self.__dict__.update(entries)

    def __repr__(self):
        # NOTE(review): type(self).__name__ is a native str; formatting it
        # into a bytes template with b"%s" looks Python 3-hostile — confirm
        # this repr is only exercised on Python 2 code paths.
        items = (
            b"%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__)
        )
        return b"%s(%s)" % (type(self).__name__, b", ".join(items))
608 608
609 609
def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
    '''Convert log into changesets.

    Groups logentry objects into changeset objects: entries sharing a CVS
    commitid are grouped directly; otherwise entries with identical
    comment/author/branch whose dates lie within ``fuzz`` seconds are
    fuzzily merged.  ``mergefrom``/``mergeto`` are regex patterns
    (defaulting to the {{mergefrombranch ...}} / {{mergetobranch ...}}
    markers) used to add extra merge parents.  Returns the list of
    changesets, numbered from 1, with synthetic changesets removed.
    '''

    ui.status(_(b'creating changesets\n'))

    # try to order commitids by date
    mindate = {}
    for e in log:
        if e.commitid:
            if e.commitid not in mindate:
                mindate[e.commitid] = e.date
            else:
                mindate[e.commitid] = min(e.date, mindate[e.commitid])

    # Merge changesets
    log.sort(
        key=lambda x: (
            mindate.get(x.commitid, (-1, 0)),
            x.commitid or b'',
            x.comment,
            x.author,
            x.branch or b'',
            x.date,
            x.branchpoints,
        )
    )

    changesets = []
    files = set()
    c = None
    for i, e in enumerate(log):

        # Check if log entry belongs to the current changeset or not.

        # Since CVS is file-centric, two different file revisions with
        # different branchpoints should be treated as belonging to two
        # different changesets (and the ordering is important and not
        # honoured by cvsps at this point).
        #
        # Consider the following case:
        # foo 1.1 branchpoints: [MYBRANCH]
        # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
        #
        # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
        # later version of foo may be in MYBRANCH2, so foo should be the
        # first changeset and bar the next and MYBRANCH and MYBRANCH2
        # should both start off of the bar changeset. No provisions are
        # made to ensure that this is, in fact, what happens.
        if not (
            c
            and e.branchpoints == c.branchpoints
            and (  # cvs commitids
                (e.commitid is not None and e.commitid == c.commitid)
                or (  # no commitids, use fuzzy commit detection
                    (e.commitid is None or c.commitid is None)
                    and e.comment == c.comment
                    and e.author == c.author
                    and e.branch == c.branch
                    and (
                        (c.date[0] + c.date[1])
                        <= (e.date[0] + e.date[1])
                        <= (c.date[0] + c.date[1]) + fuzz
                    )
                    and e.file not in files
                )
            )
        ):
            # Entry does not match the current changeset: start a new one.
            c = changeset(
                comment=e.comment,
                author=e.author,
                branch=e.branch,
                date=e.date,
                entries=[],
                mergepoint=e.mergepoint,
                branchpoints=e.branchpoints,
                commitid=e.commitid,
            )
            changesets.append(c)

            files = set()
            if len(changesets) % 100 == 0:
                t = b'%d %s' % (len(changesets), repr(e.comment)[1:-1])
                ui.status(stringutil.ellipsis(t, 80) + b'\n')

        c.entries.append(e)
        files.add(e.file)
        c.date = e.date  # changeset date is date of latest commit in it

    # Mark synthetic changesets

    for c in changesets:
        # Synthetic revisions always get their own changeset, because
        # the log message includes the filename. E.g. if you add file3
        # and file4 on a branch, you get four log entries and three
        # changesets:
        # "File file3 was added on branch ..." (synthetic, 1 entry)
        # "File file4 was added on branch ..." (synthetic, 1 entry)
        # "Add file3 and file4 to fix ..." (real, 2 entries)
        # Hence the check for 1 entry here.
        c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic

    # Sort files in each changeset

    def entitycompare(l, r):
        """Mimic cvsps sorting order"""
        l = l.file.split(b'/')
        r = r.file.split(b'/')
        nl = len(l)
        nr = len(r)
        n = min(nl, nr)
        for i in range(n):
            if i + 1 == nl and nl < nr:
                return -1
            elif i + 1 == nr and nl > nr:
                return +1
            elif l[i] < r[i]:
                return -1
            elif l[i] > r[i]:
                return +1
        return 0

    for c in changesets:
        c.entries.sort(key=functools.cmp_to_key(entitycompare))

    # Sort changesets by date

    # (l, r) pairs that compared both before and after each other; reported
    # as a warning once ids are assigned.
    odd = set()

    def cscmp(l, r):
        d = sum(l.date) - sum(r.date)
        if d:
            return d

        # detect vendor branches and initial commits on a branch
        le = {}
        for e in l.entries:
            le[e.rcs] = e.revision
        re = {}
        for e in r.entries:
            re[e.rcs] = e.revision

        d = 0
        for e in l.entries:
            if re.get(e.rcs, None) == e.parent:
                assert not d
                d = 1
                break

        for e in r.entries:
            if le.get(e.rcs, None) == e.parent:
                if d:
                    odd.add((l, r))
                d = -1
                break
        # By this point, the changesets are sufficiently compared that
        # we don't really care about ordering. However, this leaves
        # some race conditions in the tests, so we compare on the
        # number of files modified, the files contained in each
        # changeset, and the branchpoints in the change to ensure test
        # output remains stable.

        # recommended replacement for cmp from
        # https://docs.python.org/3.0/whatsnew/3.0.html
        c = lambda x, y: (x > y) - (x < y)
        # Sort bigger changes first.
        if not d:
            d = c(len(l.entries), len(r.entries))
        # Try sorting by filename in the change.
        if not d:
            d = c([e.file for e in l.entries], [e.file for e in r.entries])
        # Try and put changes without a branch point before ones with
        # a branch point.
        if not d:
            d = c(len(l.branchpoints), len(r.branchpoints))
        return d

    changesets.sort(key=functools.cmp_to_key(cscmp))

    # Collect tags

    globaltags = {}
    for c in changesets:
        for e in c.entries:
            for tag in e.tags:
                # remember which is the latest changeset to have this tag
                globaltags[tag] = c

    for c in changesets:
        tags = set()
        for e in c.entries:
            tags.update(e.tags)
        # remember tags only if this is the latest changeset to have it
        c.tags = sorted(tag for tag in tags if globaltags[tag] is c)

    # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
    # by inserting dummy changesets with two parents, and handle
    # {{mergefrombranch BRANCHNAME}} by setting two parents.

    if mergeto is None:
        mergeto = br'{{mergetobranch ([-\w]+)}}'
    if mergeto:
        mergeto = re.compile(mergeto)

    if mergefrom is None:
        mergefrom = br'{{mergefrombranch ([-\w]+)}}'
    if mergefrom:
        mergefrom = re.compile(mergefrom)

    versions = {}  # changeset index where we saw any particular file version
    branches = {}  # changeset index where we saw a branch
    n = len(changesets)
    i = 0
    while i < n:
        c = changesets[i]

        for f in c.entries:
            versions[(f.rcs, f.revision)] = i

        p = None
        if c.branch in branches:
            p = branches[c.branch]
        else:
            # first changeset on a new branch
            # the parent is a changeset with the branch in its
            # branchpoints such that it is the latest possible
            # commit without any intervening, unrelated commits.

            for candidate in pycompat.xrange(i):
                if c.branch not in changesets[candidate].branchpoints:
                    if p is not None:
                        break
                    continue
                p = candidate

        c.parents = []
        if p is not None:
            p = changesets[p]

            # Ensure no changeset has a synthetic changeset as a parent.
            while p.synthetic:
                assert len(p.parents) <= 1, _(
                    b'synthetic changeset cannot have multiple parents'
                )
                if p.parents:
                    p = p.parents[0]
                else:
                    p = None
                    break

            if p is not None:
                c.parents.append(p)

        if c.mergepoint:
            if c.mergepoint == b'HEAD':
                c.mergepoint = None
            c.parents.append(changesets[branches[c.mergepoint]])

        if mergefrom:
            m = mergefrom.search(c.comment)
            if m:
                m = m.group(1)
                if m == b'HEAD':
                    m = None
                try:
                    candidate = changesets[branches[m]]
                except KeyError:
                    # Unknown branch: warn; the 'm in branches' test below
                    # then fails, so the unbound 'candidate' is never read.
                    ui.warn(
                        _(
                            b"warning: CVS commit message references "
                            b"non-existent branch %r:\n%s\n"
                        )
                        % (pycompat.bytestr(m), c.comment)
                    )
                if m in branches and c.branch != m and not candidate.synthetic:
                    c.parents.append(candidate)

        if mergeto:
            m = mergeto.search(c.comment)
            if m:
                if m.groups():
                    m = m.group(1)
                    if m == b'HEAD':
                        m = None
                else:
                    m = None  # if no group found then merge to HEAD
                if m in branches and c.branch != m:
                    # insert empty changeset for merge
                    cc = changeset(
                        author=c.author,
                        branch=m,
                        date=c.date,
                        comment=b'convert-repo: CVS merge from branch %s'
                        % c.branch,
                        entries=[],
                        tags=[],
                        parents=[changesets[branches[m]], c],
                    )
                    changesets.insert(i + 1, cc)
                    branches[m] = i + 1

                    # adjust our loop counters now we have inserted a new entry
                    n += 1
                    i += 2
                    continue

        branches[c.branch] = i
        i += 1

    # Drop synthetic changesets (safe now that we have ensured no other
    # changesets can have them as parents).
    i = 0
    while i < len(changesets):
        if changesets[i].synthetic:
            del changesets[i]
        else:
            i += 1

    # Number changesets

    for i, c in enumerate(changesets):
        c.id = i + 1

    if odd:
        for l, r in odd:
            if l.id is not None and r.id is not None:
                ui.warn(
                    _(b'changeset %d is both before and after %d\n')
                    % (l.id, r.id)
                )

    ui.status(_(b'%d changeset entries\n') % len(changesets))

    hook.hook(ui, None, b"cvschangesets", True, changesets=changesets)

    return changesets
945 945
946 946
def debugcvsps(ui, *args, **opts):
    '''Read CVS rlog for current directory or named path in
    repository, and convert the log to changesets based on matching
    commit log entries and dates.
    '''
    opts = pycompat.byteskwargs(opts)
    # --new-cache rebuilds the cache from scratch; --update-cache extends it.
    if opts[b"new_cache"]:
        cache = b"write"
    elif opts[b"update_cache"]:
        cache = b"update"
    else:
        cache = None

    # Optional start/end markers (changeset id or tag) limiting the output.
    revisions = opts[b"revisions"]

    try:
        if args:
            log = []
            for d in args:
                log += createlog(ui, d, root=opts[b"root"], cache=cache)
        else:
            log = createlog(ui, root=opts[b"root"], cache=cache)
    except logerror as e:
        ui.write(b"%r\n" % e)
        return

    changesets = createchangeset(ui, log, opts[b"fuzz"])
    del log

    # Print changesets (optionally filtered)

    # off stays truthy until the start marker (if any) is seen.
    off = len(revisions)
    branches = {}  # latest version number in each branch
    ancestors = {}  # parent branch
    for cs in changesets:

        if opts[b"ancestors"]:
            if cs.branch not in branches and cs.parents and cs.parents[0].id:
                ancestors[cs.branch] = (
                    changesets[cs.parents[0].id - 1].branch,
                    cs.parents[0].id,
                )
            branches[cs.branch] = cs.id

        # limit by branches
        if (
            opts[b"branches"]
            and (cs.branch or b'HEAD') not in opts[b"branches"]
        ):
            continue

        if not off:
            # Note: trailing spaces on several lines here are needed to have
            # bug-for-bug compatibility with cvsps.
            ui.write(b'---------------------\n')
            ui.write((b'PatchSet %d \n' % cs.id))
            ui.write(
                (
                    b'Date: %s\n'
                    % dateutil.datestr(cs.date, b'%Y/%m/%d %H:%M:%S %1%2')
                )
            )
            ui.write((b'Author: %s\n' % cs.author))
            ui.write((b'Branch: %s\n' % (cs.branch or b'HEAD')))
            ui.write(
                (
                    b'Tag%s: %s \n'
                    % (
                        [b'', b's'][len(cs.tags) > 1],
                        b','.join(cs.tags) or b'(none)',
                    )
                )
            )
            if cs.branchpoints:
                ui.writenoi18n(
                    b'Branchpoints: %s \n' % b', '.join(sorted(cs.branchpoints))
                )
            if opts[b"parents"] and cs.parents:
                if len(cs.parents) > 1:
                    ui.write(
                        (
                            b'Parents: %s\n'
                            % (b','.join([(b"%d" % p.id) for p in cs.parents]))
                        )
                    )
                else:
                    ui.write((b'Parent: %d\n' % cs.parents[0].id))

            if opts[b"ancestors"]:
                # Walk the ancestors map up to the root, printing each hop.
                b = cs.branch
                r = []
                while b:
                    b, c = ancestors[b]
                    r.append(b'%s:%d:%d' % (b or b"HEAD", c, branches[b]))
                if r:
                    ui.write((b'Ancestors: %s\n' % (b','.join(r))))

            ui.writenoi18n(b'Log:\n')
            ui.write(b'%s\n\n' % cs.comment)
            ui.writenoi18n(b'Members: \n')
            for f in cs.entries:
                fn = f.file
                if fn.startswith(opts[b"prefix"]):
                    fn = fn[len(opts[b"prefix"]) :]
                ui.write(
                    b'\t%s:%s->%s%s \n'
                    % (
                        fn,
                        b'.'.join([b"%d" % x for x in f.parent]) or b'INITIAL',
                        b'.'.join([(b"%d" % x) for x in f.revision]),
                        [b'', b'(DEAD)'][f.dead],
                    )
                )
            ui.write(b'\n')

        # have we seen the start tag?
        if revisions and off:
            if revisions[0] == (b"%d" % cs.id) or revisions[0] in cs.tags:
                off = False

        # see if we reached the end tag
        if len(revisions) > 1 and not off:
            if revisions[1] == (b"%d" % cs.id) or revisions[1] in cs.tags:
                break
@@ -1,702 +1,704 b''
1 1 # encoding.py - character transcoding support for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import, print_function
9 9
10 10 import locale
11 11 import os
12 12 import unicodedata
13 13
14 14 from .pycompat import getattr
15 15 from . import (
16 16 error,
17 17 policy,
18 18 pycompat,
19 19 )
20 20
21 21 from .pure import charencode as charencodepure
22 22
if pycompat.TYPE_CHECKING:
    # These imports are only evaluated by static type checkers; they have
    # no runtime cost.
    from typing import (
        Any,
        Callable,
        List,
        Text,
        Type,
        TypeVar,
        Union,
    )

    # keep pyflakes happy
    for t in (Any, Callable, List, Text, Type, Union):
        assert t

    _Tlocalstr = TypeVar('_Tlocalstr', bound='localstr')
39 39
# charencode implementation selected via the policy module (may be a
# compiled or pure-Python variant — see `from .pure import charencode`)
charencode = policy.importmod('charencode')

# hot-path helpers re-exported at module level
isasciistr = charencode.isasciistr
asciilower = charencode.asciilower
asciiupper = charencode.asciiupper
_jsonescapeu8fast = charencode.jsonescapeu8fast

_sysstr = pycompat.sysstr

if pycompat.ispy3:
    # unichr does not exist on Python 3; chr is the equivalent
    unichr = chr
51 51
# These unicode characters are ignored by HFS+ (Apple Technote 1150,
# "Unicode Subtleties"), so we need to ignore them in some places for
# sanity.
_ignore = [
    unichr(int(x, 16)).encode("utf-8")
    for x in b"200c 200d 200e 200f 202a 202b 202c 202d 202e "
    b"206a 206b 206c 206d 206e 206f feff".split()
]
# verify the next function will work: every ignorable sequence starts with
# 0xe2 or 0xef, so hfsignoreclean() can use a cheap byte prefilter
assert all(i.startswith((b"\xe2", b"\xef")) for i in _ignore)
63 63
def hfsignoreclean(s):
    # type: (bytes) -> bytes
    """Remove codepoints ignored by HFS+ from s.

    >>> hfsignoreclean(u'.h\u200cg'.encode('utf-8'))
    '.hg'
    >>> hfsignoreclean(u'.h\ufeffg'.encode('utf-8'))
    '.hg'
    """
    # cheap prefilter: all ignorable UTF-8 sequences begin with one of
    # these two lead bytes (asserted next to _ignore above)
    if b"\xe2" not in s and b"\xef" not in s:
        return s
    for seq in _ignore:
        s = s.replace(seq, b'')
    return s
77 77
78 78
# encoding.environ is provided read-only, which may not be used to modify
# the process environment
_nativeenviron = not pycompat.ispy3 or os.supports_bytes_environ
if not pycompat.ispy3:
    environ = os.environ  # re-exports
elif _nativeenviron:
    environ = os.environb  # re-exports
else:
    # preferred encoding isn't known yet; use utf-8 to avoid unicode error
    # and recreate it once encoding is settled
    environ = {
        k.encode('utf-8'): v.encode('utf-8')
        for k, v in os.environ.items()  # re-exports
    }

# canonical spellings for encoding names the locale module may report oddly
_encodingrewrites = {
    b'646': b'ascii',
    b'ANSI_X3.4-1968': b'ascii',
}
# cp65001 is a Windows variant of utf-8, which isn't supported on Python 2.
# No idea if it should be rewritten to the canonical name 'utf-8' on Python 3.
# https://bugs.python.org/issue13216
if pycompat.iswindows and not pycompat.ispy3:
    _encodingrewrites[b'cp65001'] = b'utf-8'

try:
    # HGENCODING overrides the locale-derived default
    encoding = environ.get(b"HGENCODING")
    if not encoding:
        encoding = locale.getpreferredencoding().encode('ascii') or b'ascii'
    encoding = _encodingrewrites.get(encoding, encoding)
except locale.Error:
    encoding = b'ascii'
encodingmode = environ.get(b"HGENCODINGMODE", b"strict")
fallbackencoding = b'ISO-8859-1'
113 113
114 114
class localstr(bytes):
    '''This class allows strings that are unmodified to be
    round-tripped to the local encoding and back'''

    def __new__(cls, u, l):
        # the bytes value is the local-encoding form `l`; the original
        # UTF-8 form `u` is kept on the instance for lossless round-trips
        s = bytes.__new__(cls, l)
        s._utf8 = u
        return s

    if pycompat.TYPE_CHECKING:
        # pseudo implementation to help pytype see localstr() constructor
        def __init__(self, u, l):
            # type: (bytes, bytes) -> None
            super(localstr, self).__init__(l)
            self._utf8 = u

    def __hash__(self):
        return hash(self._utf8)  # avoid collisions in local string space
133 133
134 134
class safelocalstr(bytes):
    """Tagged string denoting it was previously an internal UTF-8 string,
    and can be converted back to UTF-8 losslessly

    >>> assert safelocalstr(b'\\xc3') == b'\\xc3'
    >>> assert b'\\xc3' == safelocalstr(b'\\xc3')
    >>> assert b'\\xc3' in {safelocalstr(b'\\xc3'): 0}
    >>> assert safelocalstr(b'\\xc3') in {b'\\xc3': 0}
    """
144 144
145 145
def tolocal(s):
    # type: (bytes) -> bytes
    """
    Convert a string from internal UTF-8 to local encoding

    All internal strings should be UTF-8 but some repos before the
    implementation of locale support may contain latin1 or possibly
    other character sets. We attempt to decode everything strictly
    using UTF-8, then Latin-1, and failing that, we use UTF-8 and
    replace unknown characters.

    The localstr class is used to cache the known UTF-8 encoding of
    strings next to their local representation to allow lossless
    round-trip conversion back to UTF-8.

    >>> u = b'foo: \\xc3\\xa4' # utf-8
    >>> l = tolocal(u)
    >>> l
    'foo: ?'
    >>> fromlocal(l)
    'foo: \\xc3\\xa4'
    >>> u2 = b'foo: \\xc3\\xa1'
    >>> d = { l: 1, tolocal(u2): 2 }
    >>> len(d) # no collision
    2
    >>> b'foo: ?' in d
    False
    >>> l1 = b'foo: \\xe4' # historical latin1 fallback
    >>> l = tolocal(l1)
    >>> l
    'foo: ?'
    >>> fromlocal(l) # magically in utf-8
    'foo: \\xc3\\xa4'
    """

    if isasciistr(s):
        return s

    try:
        try:
            # make sure string is actually stored in UTF-8
            u = s.decode('UTF-8')
            if encoding == b'UTF-8':
                # fast path
                return s
            r = u.encode(_sysstr(encoding), "replace")
            if u == r.decode(_sysstr(encoding)):
                # r is a safe, non-lossy encoding of s
                return safelocalstr(r)
            return localstr(s, r)
        except UnicodeDecodeError:
            # we should only get here if we're looking at an ancient changeset
            try:
                u = s.decode(_sysstr(fallbackencoding))
                r = u.encode(_sysstr(encoding), "replace")
                if u == r.decode(_sysstr(encoding)):
                    # r is a safe, non-lossy encoding of s
                    return safelocalstr(r)
                return localstr(u.encode('UTF-8'), r)
            except UnicodeDecodeError:
                u = s.decode("utf-8", "replace")  # last ditch
                # can't round-trip
                return u.encode(_sysstr(encoding), "replace")
    except LookupError as k:
        # Abort's message must be bytes, not the exception object itself
        raise error.Abort(
            pycompat.bytestr(k), hint=b"please check your locale settings"
        )
211 213
212 214
def fromlocal(s):
    # type: (bytes) -> bytes
    """
    Convert a string from the local character encoding to UTF-8

    We attempt to decode strings using the encoding mode set by
    HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
    characters will cause an error message. Other modes include
    'replace', which replaces unknown characters with a special
    Unicode character, and 'ignore', which drops the character.
    """

    # can we do a lossless round-trip?
    if isinstance(s, localstr):
        return s._utf8
    if isasciistr(s):
        return s

    try:
        u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
        return u.encode("utf-8")
    except UnicodeDecodeError as inst:
        # show a small window of context around the undecodable byte
        sub = s[max(0, inst.start - 10) : inst.start + 10]
        raise error.Abort(
            b"decoding near '%s': %s!" % (sub, pycompat.bytestr(inst))
        )
    except LookupError as k:
        # Abort's message must be bytes, not the LookupError object itself
        raise error.Abort(
            pycompat.bytestr(k), hint=b"please check your locale settings"
        )
241 243
242 244
def unitolocal(u):
    # type: (Text) -> bytes
    """Convert a unicode string to a byte string of local encoding"""
    utf8bytes = u.encode('utf-8')
    return tolocal(utf8bytes)
247 249
248 250
def unifromlocal(s):
    # type: (bytes) -> Text
    """Convert a byte string of local encoding to a unicode string"""
    utf8bytes = fromlocal(s)
    return utf8bytes.decode('utf-8')
253 255
254 256
def unimethod(bytesfunc):
    # type: (Callable[[Any], bytes]) -> Callable[[Any], Text]
    """Create a proxy method that forwards __unicode__() and __str__() of
    Python 3 to __bytes__()"""

    def unifunc(obj):
        raw = bytesfunc(obj)
        return unifromlocal(raw)

    return unifunc
264 266
265 267
# converter functions between native str and byte string. use these if the
# character encoding is not aware (e.g. exception message) or is known to
# be locale dependent (e.g. date formatting.)
if pycompat.ispy3:
    strtolocal = unitolocal
    strfromlocal = unifromlocal
    strmethod = unimethod
else:
    # on Python 2 native str is already bytes, so these are identities

    def strtolocal(s):
        # type: (str) -> bytes
        return s  # pytype: disable=bad-return-type

    def strfromlocal(s):
        # type: (bytes) -> str
        return s  # pytype: disable=bad-return-type

    strmethod = pycompat.identity

if not _nativeenviron:
    # now encoding and helper functions are available, recreate the environ
    # dict to be exported to other modules
    environ = {
        tolocal(k.encode('utf-8')): tolocal(v.encode('utf-8'))
        for k, v in os.environ.items()  # re-exports
    }
292 294
if pycompat.ispy3:
    # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
    # returns bytes.
    if pycompat.iswindows:
        # Python 3 on Windows issues a DeprecationWarning about using the bytes
        # API when os.getcwdb() is called.
        getcwd = lambda: strtolocal(os.getcwd())  # re-exports
    else:
        getcwd = os.getcwdb  # re-exports
else:
    getcwd = os.getcwd  # re-exports

# How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
# The value is the set of east_asian_width() codes counted as two columns
# by ucolwidth() below.
_wide = _sysstr(
    environ.get(b"HGENCODINGAMBIGUOUS", b"narrow") == b"wide"
    and b"WFA"
    or b"WF"
)
311 313
312 314
def colwidth(s):
    # type: (bytes) -> int
    """Find the column width of a string for display in the local encoding"""
    decoded = s.decode(_sysstr(encoding), 'replace')
    return ucolwidth(decoded)
317 319
318 320
def ucolwidth(d):
    # type: (Text) -> int
    """Find the column width of a Unicode string for display"""
    eaw = getattr(unicodedata, 'east_asian_width', None)
    if eaw is None:
        # no east-asian-width data available; count one column per char
        return len(d)
    return sum(2 if eaw(c) in _wide else 1 for c in d)
326 328
327 329
def getcols(s, start, c):
    # type: (bytes, int, int) -> bytes
    '''Use colwidth to find a c-column substring of s starting at byte
    index start'''
    # grow the candidate one byte at a time until it spans exactly c columns
    end = start + c
    while end < len(s):
        candidate = s[start:end]
        if colwidth(candidate) == c:
            return candidate
        end += 1
    raise ValueError('substring not found')
337 339
338 340
def trim(s, width, ellipsis=b'', leftside=False):
    # type: (bytes, int, bytes, bool) -> bytes
    """Trim string 's' to at most 'width' columns (including 'ellipsis').

    If 'leftside' is True, left side of string 's' is trimmed.
    'ellipsis' is always placed at trimmed side.

    >>> from .node import bin
    >>> def bprint(s):
    ...     print(pycompat.sysstr(s))
    >>> ellipsis = b'+++'
    >>> from . import encoding
    >>> encoding.encoding = b'utf-8'
    >>> t = b'1234567890'
    >>> bprint(trim(t, 12, ellipsis=ellipsis))
    1234567890
    >>> bprint(trim(t, 10, ellipsis=ellipsis))
    1234567890
    >>> bprint(trim(t, 8, ellipsis=ellipsis))
    12345+++
    >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
    +++67890
    >>> bprint(trim(t, 8))
    12345678
    >>> bprint(trim(t, 8, leftside=True))
    34567890
    >>> bprint(trim(t, 3, ellipsis=ellipsis))
    +++
    >>> bprint(trim(t, 1, ellipsis=ellipsis))
    +
    >>> u = u'\u3042\u3044\u3046\u3048\u304a' # 2 x 5 = 10 columns
    >>> t = u.encode(pycompat.sysstr(encoding.encoding))
    >>> bprint(trim(t, 12, ellipsis=ellipsis))
    \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
    >>> bprint(trim(t, 10, ellipsis=ellipsis))
    \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
    >>> bprint(trim(t, 8, ellipsis=ellipsis))
    \xe3\x81\x82\xe3\x81\x84+++
    >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
    +++\xe3\x81\x88\xe3\x81\x8a
    >>> bprint(trim(t, 5))
    \xe3\x81\x82\xe3\x81\x84
    >>> bprint(trim(t, 5, leftside=True))
    \xe3\x81\x88\xe3\x81\x8a
    >>> bprint(trim(t, 4, ellipsis=ellipsis))
    +++
    >>> bprint(trim(t, 4, ellipsis=ellipsis, leftside=True))
    +++
    >>> t = bin(b'112233445566778899aa') # invalid byte sequence
    >>> bprint(trim(t, 12, ellipsis=ellipsis))
    \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
    >>> bprint(trim(t, 10, ellipsis=ellipsis))
    \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
    >>> bprint(trim(t, 8, ellipsis=ellipsis))
    \x11\x22\x33\x44\x55+++
    >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
    +++\x66\x77\x88\x99\xaa
    >>> bprint(trim(t, 8))
    \x11\x22\x33\x44\x55\x66\x77\x88
    >>> bprint(trim(t, 8, leftside=True))
    \x33\x44\x55\x66\x77\x88\x99\xaa
    >>> bprint(trim(t, 3, ellipsis=ellipsis))
    +++
    >>> bprint(trim(t, 1, ellipsis=ellipsis))
    +
    """
    try:
        u = s.decode(_sysstr(encoding))
    except UnicodeDecodeError:
        # undecodable input: fall back to byte-based trimming, treating
        # each byte as one column
        if len(s) <= width:  # trimming is not needed
            return s
        width -= len(ellipsis)
        if width <= 0:  # no enough room even for ellipsis
            return ellipsis[: width + len(ellipsis)]
        if leftside:
            return ellipsis + s[-width:]
        return s[:width] + ellipsis

    if ucolwidth(u) <= width:  # trimming is not needed
        return s

    width -= len(ellipsis)
    if width <= 0:  # no enough room even for ellipsis
        return ellipsis[: width + len(ellipsis)]

    if leftside:
        uslice = lambda i: u[i:]
        concat = lambda s: ellipsis + s
    else:
        uslice = lambda i: u[:-i]
        concat = lambda s: s + ellipsis
    # drop characters one at a time from the trimmed side until the
    # remainder fits in the requested column width
    for i in pycompat.xrange(1, len(u)):
        usub = uslice(i)
        if ucolwidth(usub) <= width:
            return concat(usub.encode(_sysstr(encoding)))
    return ellipsis  # no enough room for multi-column characters
435 437
436 438
def lower(s):
    # type: (bytes) -> bytes
    """best-effort encoding-aware case-folding of local string s"""
    try:
        # ASCII-only fast path
        return asciilower(s)
    except UnicodeDecodeError:
        pass
    try:
        if isinstance(s, localstr):
            u = s._utf8.decode("utf-8")
        else:
            u = s.decode(_sysstr(encoding), _sysstr(encodingmode))

        lu = u.lower()
        if u == lu:
            return s  # preserve localstring
        return lu.encode(_sysstr(encoding))
    except UnicodeError:
        return s.lower()  # we don't know how to fold this except in ASCII
    except LookupError as k:
        # Abort's message must be bytes, not the LookupError object itself
        raise error.Abort(
            pycompat.bytestr(k), hint=b"please check your locale settings"
        )
458 460
459 461
def upper(s):
    # type: (bytes) -> bytes
    """best-effort encoding-aware case-folding of local string s"""
    try:
        # ASCII-only fast path
        return asciiupper(s)
    except UnicodeDecodeError:
        # non-ASCII bytes present; use the slower locale-aware path
        return upperfallback(s)
467 469
468 470
def upperfallback(s):
    # type: (Any) -> Any
    """Locale-aware uppercasing used when the ASCII fast path fails."""
    try:
        if isinstance(s, localstr):
            u = s._utf8.decode("utf-8")
        else:
            u = s.decode(_sysstr(encoding), _sysstr(encodingmode))

        uu = u.upper()
        if u == uu:
            return s  # preserve localstring
        return uu.encode(_sysstr(encoding))
    except UnicodeError:
        return s.upper()  # we don't know how to fold this except in ASCII
    except LookupError as k:
        # Abort's message must be bytes, not the LookupError object itself
        raise error.Abort(
            pycompat.bytestr(k), hint=b"please check your locale settings"
        )
485 487
486 488
class normcasespecs(object):
    '''what a platform's normcase does to ASCII strings

    This is specified per platform, and should be consistent with what normcase
    on that platform actually does.

    lower: normcase lowercases ASCII strings
    upper: normcase uppercases ASCII strings
    other: the fallback function should always be called

    This should be kept in sync with normcase_spec in util.h.'''

    # enum-like integer constants; values mirror normcase_spec in util.h
    lower = -1
    upper = 1
    other = 0
502 504
503 505
def jsonescape(s, paranoid=False):
    # type: (Any, Any) -> Any
    '''returns a string suitable for JSON

    JSON is problematic for us because it doesn't support non-Unicode
    bytes. To deal with this, we take the following approach:

    - localstr/safelocalstr objects are converted back to UTF-8
    - valid UTF-8/ASCII strings are passed as-is
    - other strings are converted to UTF-8b surrogate encoding
    - apply JSON-specified string escaping

    (escapes are doubled in these tests)

    >>> jsonescape(b'this is a test')
    'this is a test'
    >>> jsonescape(b'escape characters: \\0 \\x0b \\x7f')
    'escape characters: \\\\u0000 \\\\u000b \\\\u007f'
    >>> jsonescape(b'escape characters: \\b \\t \\n \\f \\r \\" \\\\')
    'escape characters: \\\\b \\\\t \\\\n \\\\f \\\\r \\\\" \\\\\\\\'
    >>> jsonescape(b'a weird byte: \\xdd')
    'a weird byte: \\xed\\xb3\\x9d'
    >>> jsonescape(b'utf-8: caf\\xc3\\xa9')
    'utf-8: caf\\xc3\\xa9'
    >>> jsonescape(b'')
    ''

    If paranoid, non-ascii and common troublesome characters are also escaped.
    This is suitable for web output.

    >>> s = b'escape characters: \\0 \\x0b \\x7f'
    >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
    >>> s = b'escape characters: \\b \\t \\n \\f \\r \\" \\\\'
    >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
    >>> jsonescape(b'escape boundary: \\x7e \\x7f \\xc2\\x80', paranoid=True)
    'escape boundary: ~ \\\\u007f \\\\u0080'
    >>> jsonescape(b'a weird byte: \\xdd', paranoid=True)
    'a weird byte: \\\\udcdd'
    >>> jsonescape(b'utf-8: caf\\xc3\\xa9', paranoid=True)
    'utf-8: caf\\\\u00e9'
    >>> jsonescape(b'non-BMP: \\xf0\\x9d\\x84\\x9e', paranoid=True)
    'non-BMP: \\\\ud834\\\\udd1e'
    >>> jsonescape(b'<foo@example.org>', paranoid=True)
    '\\\\u003cfoo@example.org\\\\u003e'
    '''

    u8chars = toutf8b(s)
    try:
        return _jsonescapeu8fast(u8chars, paranoid)
    except ValueError:
        # the fast helper signalled it cannot handle this input;
        # fall back to the pure-Python implementation
        pass
    return charencodepure.jsonescapeu8fallback(u8chars, paranoid)
556 558
557 559
# We need to decode/encode U+DCxx codes transparently since invalid UTF-8
# bytes are mapped to that range.
if pycompat.ispy3:
    _utf8strict = r'surrogatepass'
else:
    _utf8strict = r'strict'

# total utf-8 sequence length keyed by the high nibble of the lead byte;
# 0 marks a plain ASCII byte (see getutf8char below)
_utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]
566 568
567 569
def getutf8char(s, pos):
    # type: (bytes, int) -> bytes
    '''get the next full utf-8 character in the given string, starting at pos

    Raises a UnicodeError if the given location does not start a valid
    utf-8 character.
    '''
    first = s[pos : pos + 1]

    # high nibble of the lead byte tells us how many bytes to take
    length = _utf8len[ord(first) >> 4]
    if not length:
        # plain ASCII byte
        return first

    c = s[pos : pos + length]
    # validate with attempted decode
    c.decode("utf-8", _utf8strict)
    return c
585 587
586 588
def toutf8b(s):
    # type: (bytes) -> bytes
    '''convert a local, possibly-binary string into UTF-8b

    This is intended as a generic method to preserve data when working
    with schemes like JSON and XML that have no provision for
    arbitrary byte strings. As Mercurial often doesn't know
    what encoding data is in, we use so-called UTF-8b.

    If a string is already valid UTF-8 (or ASCII), it passes unmodified.
    Otherwise, unsupported bytes are mapped to UTF-16 surrogate range,
    uDC00-uDCFF.

    Principles of operation:

    - ASCII and UTF-8 data successfully round-trips and is understood
      by Unicode-oriented clients
    - filenames and file contents in arbitrary other encodings can have
      be round-tripped or recovered by clueful clients
    - local strings that have a cached known UTF-8 encoding (aka
      localstr) get sent as UTF-8 so Unicode-oriented clients get the
      Unicode data they want
    - non-lossy local strings (aka safelocalstr) get sent as UTF-8 as well
    - because we must preserve UTF-8 bytestring in places such as
      filenames, metadata can't be roundtripped without help

    (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and
    arbitrary bytes into an internal Unicode format that can be
    re-encoded back into the original. Here we are exposing the
    internal surrogate encoding as a UTF-8 string.)
    '''

    if isinstance(s, localstr):
        # assume that the original UTF-8 sequence would never contain
        # invalid characters in U+DCxx range
        return s._utf8
    elif isinstance(s, safelocalstr):
        # already verified that s is non-lossy in legacy encoding, which
        # shouldn't contain characters in U+DCxx range
        return fromlocal(s)
    elif isasciistr(s):
        return s
    if b"\xed" not in s:
        # no surrogate lead byte present: if the whole string decodes as
        # UTF-8, it can pass through unmodified
        try:
            s.decode('utf-8', _utf8strict)
            return s
        except UnicodeDecodeError:
            pass

    # slow path: walk the string character by character, escaping invalid
    # bytes (and pre-existing U+DCxx characters) into the surrogate range
    s = pycompat.bytestr(s)
    r = b""
    pos = 0
    l = len(s)
    while pos < l:
        try:
            c = getutf8char(s, pos)
            if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
                # have to re-escape existing U+DCxx characters
                c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
                pos += 1
            else:
                pos += len(c)
        except UnicodeDecodeError:
            c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
            pos += 1
        r += c
    return r
654 656
655 657
def fromutf8b(s):
    # type: (bytes) -> bytes
    '''Given a UTF-8b string, return a local, possibly-binary string.

    return the original binary string. This
    is a round-trip process for strings like filenames, but metadata
    that's was passed through tolocal will remain in UTF-8.

    >>> roundtrip = lambda x: fromutf8b(toutf8b(x)) == x
    >>> m = b"\\xc3\\xa9\\x99abcd"
    >>> toutf8b(m)
    '\\xc3\\xa9\\xed\\xb2\\x99abcd'
    >>> roundtrip(m)
    True
    >>> roundtrip(b"\\xc2\\xc2\\x80")
    True
    >>> roundtrip(b"\\xef\\xbf\\xbd")
    True
    >>> roundtrip(b"\\xef\\xef\\xbf\\xbd")
    True
    >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
    True
    '''

    if isasciistr(s):
        return s
    # fast path - look for uDxxx prefixes in s
    if b"\xed" not in s:
        return s

    # We could do this with the unicode type but some Python builds
    # use UTF-16 internally (issue5031) which causes non-BMP code
    # points to be escaped. Instead, we use our handy getutf8char
    # helper again to walk the string without "decoding" it.

    s = pycompat.bytestr(s)
    r = b""
    pos = 0
    l = len(s)
    while pos < l:
        c = getutf8char(s, pos)
        pos += len(c)
        # unescape U+DCxx characters back to the original raw byte
        if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
            c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xFF)
        r += c
    return r
@@ -1,3157 +1,3157 b''
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import weakref
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 nullrev,
18 18 )
19 19 from .thirdparty import attr
20 20 from . import (
21 21 bookmarks as bookmod,
22 22 bundle2,
23 23 changegroup,
24 24 discovery,
25 25 error,
26 26 exchangev2,
27 27 lock as lockmod,
28 28 logexchange,
29 29 narrowspec,
30 30 obsolete,
31 31 obsutil,
32 32 phases,
33 33 pushkey,
34 34 pycompat,
35 35 requirements,
36 36 scmutil,
37 37 sslutil,
38 38 streamclone,
39 39 url as urlmod,
40 40 util,
41 41 wireprototypes,
42 42 )
43 43 from .utils import (
44 44 hashutil,
45 45 stringutil,
46 46 )
47 47
urlerr = util.urlerr
urlreq = util.urlreq

_NARROWACL_SECTION = b'narrowacl'

# Maps bundle version human names to changegroup versions.
_bundlespeccgversions = {
    b'v1': b'01',
    b'v2': b'02',
    b'packed1': b's1',
    b'bundle2': b'02',  # legacy
}

# Maps bundle version with content opts to choose which part to bundle
_bundlespeccontentopts = {
    b'v1': {
        b'changegroup': True,
        b'cg.version': b'01',
        b'obsolescence': False,
        b'phases': False,
        b'tagsfnodescache': False,
        b'revbranchcache': False,
    },
    b'v2': {
        b'changegroup': True,
        b'cg.version': b'02',
        b'obsolescence': False,
        b'phases': False,
        b'tagsfnodescache': True,
        b'revbranchcache': True,
    },
    b'packed1': {b'cg.version': b's1'},
}
# "bundle2" is a legacy alias for the v2 content options
_bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']

# content-option overrides applied when the "stream=v2" variant is requested
_bundlespecvariants = {
    b"streamv2": {
        b"changegroup": False,
        b"streamv2": True,
        b"tagsfnodescache": False,
        b"revbranchcache": False,
    }
}

# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
_bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
94 94
95 95
@attr.s
class bundlespec(object):
    """Parsed components of a bundle specification string.

    Produced by parsebundlespec() below.
    """

    compression = attr.ib()  # human name of the compression engine
    wirecompression = attr.ib()  # wire-protocol name of the compression engine
    version = attr.ib()  # bundle version (human name, e.g. b'v2')
    wireversion = attr.ib()  # changegroup version for the wire
    params = attr.ib()  # dict of extra key=value parameters from the spec
    contentopts = attr.ib()  # options controlling which parts to bundle
105 105
def parsebundlespec(repo, spec, strict=True):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    Returns a bundlespec object of (compression, version, parameters).
    Compression will be ``None`` if not in strict mode and a compression isn't
    defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """

    def parseparams(s):
        # split off the URI-encoded, ";"-delimited key=value parameters
        if b';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(b';', 1)

        for p in paramstr.split(b';'):
            if b'=' not in p:
                raise error.InvalidBundleSpecification(
                    _(
                        b'invalid bundle specification: '
                        b'missing "=" in parameter: %s'
                    )
                    % p
                )

            key, value = p.split(b'=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params

    if strict and b'-' not in spec:
        raise error.InvalidBundleSpecification(
            _(
                b'invalid bundle specification; '
                b'must be prefixed with compression: %s'
            )
            % spec
        )

    if b'-' in spec:
        # full "<compression>-<version>" form
        compression, version = spec.split(b'-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _(b'%s compression is not supported') % compression
            )

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _(b'%s is not a recognized bundle version') % version
            )
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = b'v1'
            # Generaldelta repos require v2.
            if b'generaldelta' in repo.requirements:
                version = b'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = b'v2'
        elif spec in _bundlespeccgversions:
            if spec == b'packed1':
                compression = b'none'
            else:
                compression = b'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _(b'%s is not a recognized bundle specification') % spec
            )

    # Bundle version 1 only supports a known set of compression engines.
    if version == b'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _(b'compression engine %s is not supported on v1 bundles')
            % compression
        )

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == b'packed1' and b'requirements' in params:
        requirements = set(params[b'requirements'].split(b','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _(b'missing support for repository features: %s')
                % b', '.join(sorted(missingreqs))
            )

    # Compute contentopts based on the version
    contentopts = _bundlespeccontentopts.get(version, {}).copy()

    # Process the variants
    if b"stream" in params and params[b"stream"] == b"v2":
        variant = _bundlespecvariants[b"streamv2"]
        contentopts.update(variant)

    engine = util.compengines.forbundlename(compression)
    compression, wirecompression = engine.bundletype()
    wireversion = _bundlespeccgversions[version]

    return bundlespec(
        compression, wirecompression, version, wireversion, params, contentopts
    )
248 248
249 249
def readbundle(ui, fh, fname, vfs=None):
    """Sniff a bundle's 4-byte magic header and return the matching unbundler.

    Returns a changegroup.cg1unpacker (``HG10``), a bundle2 unbundler
    (``HG2x``) or a streamclone.streamcloneapplier (``HGS1``); aborts on
    anything else. ``fname`` (expanded through ``vfs`` when given) is only
    used in error messages.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = b"stream"
        # headerless stream: assume an uncompressed HG10 bundle and
        # re-prepend the four bytes we already consumed
        if not header.startswith(b'HG') and header.startswith(b'\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = b"HG10"
            alg = b'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != b'HG':
        raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
    if version == b'10':
        if alg is None:
            # for HG10, a 2-byte compression marker follows the header
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith(b'2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == b'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(
            _(b'%s: unknown bundle version %s') % (fname, version)
        )
279 279
280 280
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.

    Returns a bundlespec string such as ``bzip2-v1``, ``none-v2;stream=v2;…``
    or ``none-packed1;…``; aborts when the bundle's compression, changegroup
    version or overall type cannot be recognized.
    """

    def speccompression(alg):
        # Map an internal bundle compression type to its bundlespec name,
        # or None when the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == b'_truncatedBZ':
            alg = b'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
        return b'%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if b'Compression' in b.params:
            alg = b.params[b'Compression']
            comp = speccompression(alg)
            if not comp:
                # Fix: report the algorithm that failed to resolve.
                # Formatting ``comp`` here (always falsy at this point)
                # used to print "None" instead of the offending name.
                raise error.Abort(
                    _(b'unknown compression algorithm: %s') % alg
                )
        else:
            comp = b'none'

        version = None
        for part in b.iterparts():
            if part.type == b'changegroup':
                version = part.params[b'version']
                if version in (b'01', b'02'):
                    version = b'v2'
                else:
                    raise error.Abort(
                        _(
                            b'changegroup version %s does not have '
                            b'a known bundlespec'
                        )
                        % version,
                        hint=_(b'try upgrading your Mercurial client'),
                    )
            elif part.type == b'stream2' and version is None:
                # A stream2 part requires to be part of a v2 bundle
                requirements = urlreq.unquote(part.params[b'requirements'])
                splitted = requirements.split()
                params = bundle2._formatrequirementsparams(splitted)
                return b'none-v2;stream=v2;%s' % params

        if not version:
            raise error.Abort(
                _(b'could not identify changegroup version in bundle')
            )

        return b'%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        formatted = bundle2._formatrequirementsparams(requirements)
        return b'none-packed1;%s' % formatted
    else:
        raise error.Abort(_(b'unknown bundle type: %s') % b)
347 347
348 348
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    changelog = repo.changelog
    if common:
        # drop nodes the local repository does not actually know about
        known = changelog.hasnode
        base = [node for node in common if known(node)]
    else:
        base = [nullid]
    if not heads:
        heads = changelog.heads()
    return discovery.outgoing(repo, base, heads)
367 367
368 368
def _checkpublish(pushop):
    """Enforce the 'experimental.auto-publish' policy before pushing.

    When the push would turn draft changesets public on a publishing
    remote, warn, prompt for confirmation, or abort depending on the
    configured behavior. A no-op for explicit --publish pushes, for
    unrecognized config values, and for non-publishing remotes.
    """
    repo = pushop.repo
    ui = repo.ui
    behavior = ui.config(b'experimental', b'auto-publish')
    if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
        return
    remotephases = listkeys(pushop.remote, b'phases')
    if not remotephases.get(b'publishing', False):
        return

    if pushop.revs is None:
        # pushing everything: any non-public changeset visible to peers
        published = repo.filtered(b'served').revs(b'not public()')
    else:
        # only the ancestors of the pushed revs can be published
        published = repo.revs(b'::%ln - public()', pushop.revs)
    if published:
        if behavior == b'warn':
            ui.warn(
                _(b'%i changesets about to be published\n') % len(published)
            )
        elif behavior == b'confirm':
            if ui.promptchoice(
                _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
                % len(published)
            ):
                raise error.Abort(_(b'user quit'))
        elif behavior == b'abort':
            msg = _(b'push would publish %i changesets') % len(published)
            hint = _(
                b"use --publish or adjust 'experimental.auto-publish'"
                b" config"
            )
            raise error.Abort(msg, hint=hint)
401 401
402 402
403 403 def _forcebundle1(op):
404 404 """return true if a pull/push must use bundle1
405 405
406 406 This function is used to allow testing of the older bundle version"""
407 407 ui = op.repo.ui
408 408 # The goal is this config is to allow developer to choose the bundle
409 409 # version used during exchanged. This is especially handy during test.
410 410 # Value is a list of bundle version to be picked from, highest version
411 411 # should be used.
412 412 #
413 413 # developer config: devel.legacy.exchange
414 414 exchange = ui.configlist(b'devel', b'legacy.exchange')
415 415 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
416 416 return forcebundle1 or not op.remote.capable(b'bundle2')
417 417
418 418
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(
        self,
        repo,
        remote,
        force=False,
        revs=None,
        newbranch=False,
        bookmarks=(),
        publish=False,
        pushvars=None,
    ):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
        self.outbookmarks = []
        # transaction manager (set lazily by push() when locks are held)
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars
        # publish pushed changesets
        self.publish = publish

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.ancestorsof

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::ancestorsof and ::commonheads)
        # (ancestorsof is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (ancestorsof and ::commonheads)
        #              + (commonheads and ::ancestorsof))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::ancestorsof) - commonheads)
        #
        # We can pick:
        # * ancestorsof part of common (::commonheads)
        common = self.outgoing.common
        rev = self.repo.changelog.index.rev
        cheads = [node for node in self.revs if rev(node) in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set(
            b'%ln and parents(roots(%ln))',
            self.outgoing.commonheads,
            self.outgoing.missing,
        )
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads
549 549
550 550
# mapping of message used when pushing bookmark
# action -> (success message template, failure message template); each
# template takes the bookmark name as its single %s argument
bookmsgmap = {
    b'update': (
        _(b"updating bookmark %s\n"),
        _(b'updating bookmark %s failed!\n'),
    ),
    b'export': (
        _(b"exporting bookmark %s\n"),
        _(b'exporting bookmark %s failed!\n'),
    ),
    b'delete': (
        _(b"deleting remote bookmark %s\n"),
        _(b'deleting remote bookmark %s failed!\n'),
    ),
}
566 566
567 567
def push(
    repo,
    remote,
    force=False,
    revs=None,
    newbranch=False,
    bookmarks=(),
    publish=False,
    opargs=None,
):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote.

    Returns the pushoperation object; its ``cgresult`` attribute holds an
    integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(
        repo,
        remote,
        force,
        revs,
        newbranch,
        bookmarks,
        publish,
        **pycompat.strkwargs(opargs)
    )
    if pushop.remote.local():
        missing = (
            set(pushop.repo.requirements) - pushop.remote.local().supported
        )
        if missing:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_(b"destination does not support push"))

    if not pushop.remote.capable(b'unbundle'):
        raise error.Abort(
            _(
                b'cannot push: destination does not support the '
                b'unbundle wire protocol command'
            )
        )

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks
        # requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
        if (
            (not _forcebundle1(pushop))
            and maypushback
            and not bookmod.bookmarksinstore(repo)
        ):
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(
            pushop.repo, b'push-response', pushop.remote.url()
        )
    except error.LockUnavailable as err:
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
            err
        )
        pushop.ui.debug(msg)

    # wlock/lock/trmanager may each be None (best-effort locking above)
    with wlock or util.nullcontextmanager():
        with lock or util.nullcontextmanager():
            with pushop.trmanager or util.nullcontextmanager():
                pushop.repo.checkpush(pushop)
                _checkpublish(pushop)
                _pushdiscovery(pushop)
                if not pushop.force:
                    _checksubrepostate(pushop)
                if not _forcebundle1(pushop):
                    _pushbundle2(pushop)
                _pushchangeset(pushop)
                _pushsyncphase(pushop)
                _pushobsolete(pushop)
                _pushbookmark(pushop)

    if repo.ui.configbool(b'experimental', b'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pushop
665 665
666 666
# list of steps to perform discovery before push
# (populated by the pushdiscovery() decorator; registration order matters)
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
# (also populated by the pushdiscovery() decorator)
pushdiscoverymapping = {}
674 674
675 675
def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""

    def register(func):
        # each step name may only be registered once
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func

    return register
693 693
694 694
def _pushdiscovery(pushop):
    """Run all discovery steps"""
    # invoke every registered step, in registration order
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
700 700
701 701
def _checksubrepostate(pushop):
    """Ensure all outgoing referenced subrepo revisions are present locally"""
    for node in pushop.outgoing.missing:
        ctx = pushop.repo[node]

        has_subrepos = b'.hgsub' in ctx.manifest()
        touches_substate = b'.hgsubstate' in ctx.files()
        if has_subrepos and touches_substate:
            for subpath in sorted(ctx.substate):
                ctx.sub(subpath).verify(onpush=True)
711 711
712 712
@pushdiscovery(b'changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    repo, remote = pushop.repo, pushop.remote

    # figure out what the remote is missing / has in common with us
    incoming_kwargs = {'force': pushop.force}
    if pushop.revs:
        incoming_kwargs['ancestorsof'] = pushop.revs
    commoninc = discovery.findcommonincoming(repo, remote, **incoming_kwargs)
    common, inc, remoteheads = commoninc

    # derive the outgoing set from the common-incoming result
    outgoing = discovery.findcommonoutgoing(
        repo,
        remote,
        onlyheads=pushop.revs,
        commoninc=commoninc,
        force=pushop.force,
    )
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
738 738
739 739
@pushdiscovery(b'phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = listkeys(pushop.remote, b'phases')

    if (
        pushop.ui.configbool(b'ui', b'_usedassubrepo')
        and remotephases  # server supports phases
        and not pushop.outgoing.missing  # no changesets to be pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(
        pushop.repo, pushop.fallbackheads, remotephases
    )
    droots = pushop.remotephases.draftroots

    # on a non-publishing remote, only already-public changesets need a
    # phase update; hence the extra 'and public()' clause
    extracond = b''
    if not pushop.remotephases.publishing:
        extracond = b' and public()'
    revset = b'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not pushop.remotephases.publishing and pushop.publish:
        # --publish push to a non-publishing remote: everything pushed (and
        # draft descendants of remote draft roots) becomes public
        future = list(
            unfi.set(
                b'%ln and (not public() or %ln::)', pushop.futureheads, droots
            )
        )
    elif not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(
            unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
        )
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
801 801
802 802
@pushdiscovery(b'obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect the obsolescence markers relevant to the pushed heads"""
    repo = pushop.repo

    # bail out unless marker exchange is enabled, the local store actually
    # has markers, and the remote advertises the 'obsolete' namespace
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
        return

    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    pushed = (ctx.node() for ctx in repo.set(b'::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(pushed)
819 819
820 820
@pushdiscovery(b'bookmarks')
def _pushdiscoverybookmarks(pushop):
    """figure out which bookmark moves/additions/deletions to push"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    ui.debug(b"checking for updated bookmarks\n")

    # when specific revs are pushed, bookmark moves are restricted to the
    # ancestors of those revs
    ancestors = ()
    if pushop.revs:
        revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = bookmod.unhexlifybookmarks(
        listkeys(pushop.remote, b'bookmarks')
    )

    # bookmarks the user named explicitly on the command line
    explicit = {
        repo._bookmarks.expandname(name) for name in pushop.bookmarks
    }

    comparison = bookmod.comparebookmarks(
        repo, repo._bookmarks, remotebookmark
    )
    return _processcompared(
        pushop, ancestors, explicit, remotebookmark, comparison
    )
840 840
841 841
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmarks to push to the remote repo

    Exists to help extensions alter this behavior.
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo

    # fast-forwarded locally: push when unrestricted or when the new
    # position is among the pushed revisions
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not pushed or repo[scid].rev() in pushed:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        if bookmod.isdivergent(b):
            pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % b)
            pushop.bkresult = 2
        else:
            pushop.outbookmarks.append((b, b'', scid))
    # search for overwritten bookmark
    # (only pushed when named explicitly on the command line)
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    # (only deleted when named explicitly on the command line)
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, b''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    # anything left in 'explicit' was named by the user but matched nothing
    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(
            _(
                b'bookmark %s does not exist on the local '
                b'or remote repository!\n'
            )
            % explicit[0]
        )
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
894 894
895 895
def _pushcheckoutgoing(pushop):
    """Validate the outgoing changesets; return True when there is
    something to push.

    Aborts (unless --force) when an outgoing head is obsolete or unstable,
    and runs the remote-head race check via discovery.checkheads().
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _(b"push includes obsolete changeset: %s!")
            mspd = _(b"push includes phase-divergent changeset: %s!")
            mscd = _(b"push includes content-divergent changeset: %s!")
            mst = {
                b"orphan": _(b"push includes orphan changeset: %s!"),
                b"phase-divergent": mspd,
                b"content-divergent": mscd,
            }
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.ancestorsof:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
932 932
933 933
# List of names of steps to perform for an outgoing bundle2, order matters.
# (populated by the b2partsgenerator() decorator)
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
# (also populated by the b2partsgenerator() decorator)
b2partsgenmapping = {}
941 941
942 942
def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""

    def register(func):
        # each step name may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            # default: run after every previously registered step
            b2partsgenorder.append(stepname)
        else:
            # explicit position requested by the caller
            b2partsgenorder.insert(idx, stepname)
        return func

    return register
963 963
964 964
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.ancestorsof:
        allowunrelated = b'related' in bundler.capabilities.get(
            b'checkheads', ()
        )
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # legacy check: server compares against the full remote head set
            bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
        else:
            # fine-grained check: only send heads our push actually affects
            affected = set()
            for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    # heads we obsolete or replace must still exist remotely
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart(b'check:updated-heads', data=data)
990 990
991 991
992 992 def _pushing(pushop):
993 993 """return True if we are pushing anything"""
994 994 return bool(
995 995 pushop.outgoing.missing
996 996 or pushop.outdatedphases
997 997 or pushop.outobsmarkers
998 998 or pushop.outbookmarks
999 999 )
1000 1000
1001 1001
@b2partsgenerator(b'check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if pushop.force or not _pushing(pushop):
        return
    caps = bundle2.bundle2caps(pushop.remote)
    if b'bookmarks' not in caps or not pushop.outbookmarks:
        return
    # record the pre-push value of each moved bookmark so the server can
    # detect concurrent moves
    expected = [(book, old) for book, old, new in pushop.outbookmarks]
    bundler.newpart(b'check:bookmarks', data=bookmod.binaryencode(expected))
1016 1016
1017 1017
@b2partsgenerator(b'check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking

    Emits a 'check:phases' part recording the remote phase heads/roots seen
    during discovery so the server can detect concurrent phase moves.
    Skipped for forced pushes and servers without the 'heads' phase cap.
    """
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = b'heads' in b2caps.get(b'phases', ())
    if pushop.remotephases is not None and hasphaseheads:
        # check that the remote phase has not changed
        checks = {p: [] for p in phases.allphases}
        checks[phases.public].extend(pushop.remotephases.publicheads)
        checks[phases.draft].extend(pushop.remotephases.draftroots)
        if any(pycompat.itervalues(checks)):
            # sort for a deterministic part payload
            for phase in checks:
                checks[phase].sort()
            checkdata = phases.binaryencode(checks)
            bundler.newpart(b'check:phases', data=checkdata)
1035 1035
1036 1036
@b2partsgenerator(b'changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        # use the highest changegroup version both sides support
        cgversions = [
            v
            for v in cgversions
            if v in changegroup.supportedoutgoingversions(pushop.repo)
        ]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(
        pushop.repo, pushop.outgoing, version, b'push'
    )
    cgpart = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam(b'version', version)
    if scmutil.istreemanifest(pushop.repo):
        cgpart.addparam(b'treemanifest', b'1')
    if b'exp-sidedata-flag' in pushop.repo.requirements:
        cgpart.addparam(b'exp-sidedata', b'1')

    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies[b'changegroup']) == 1
        pushop.cgresult = cgreplies[b'changegroup'][0][b'return']

    return handlereply
1083 1083
1084 1084
@b2partsgenerator(b'phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if b'phases' in pushop.stepsdone:
        return
    caps = bundle2.bundle2caps(pushop.remote)
    forced_legacy = b'phases' in pushop.repo.ui.configlist(
        b'devel', b'legacy.exchange'
    )

    if b'heads' in caps.get(b'phases', ()) and not forced_legacy:
        # modern binary phase-heads exchange
        return _pushb2phaseheads(pushop, bundler)
    if b'pushkey' in caps:
        # legacy pushkey-based phase exchange
        return _pushb2phasespushkey(pushop, bundler)
1101 1101
1102 1102
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part"""
    pushop.stepsdone.add(b'phases')
    if not pushop.outdatedphases:
        return
    updates = {phase: [] for phase in phases.allphases}
    # every outdated head becomes public (phase 0) on the remote
    updates[0].extend(head.node() for head in pushop.outdatedphases)
    bundler.newpart(b'phase-heads', data=phases.binaryencode(updates))
1111 1111
1112 1112
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add(b'phases')
    # (part id, node) pairs, so the reply/failure handlers can map a part
    # back to the head it was publishing
    part2node = []

    def handlefailure(pushop, exc):
        # called when a mandatory pushkey part fails on the server
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_(b'updating %s to public failed') % node)

    enc = pushkey.encode
    # one pushkey part per head to move from draft to public
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'phases'))
        part.addparam(b'key', enc(newremotehead.hex()))
        part.addparam(b'old', enc(b'%d' % phases.draft))
        part.addparam(b'new', enc(b'%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # warn for every head the server ignored or failed to publish
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep[b'pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _(b'server ignored update of %s to public!\n') % node
            elif not int(results[0][b'return']):
                msg = _(b'updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)

    return handlereply
1148 1148
1149 1149
@b2partsgenerator(b'obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """Attach outgoing obsolescence markers to the bundle when possible."""
    if b'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no marker format is understood by both sides; leave the step undone
        return
    pushop.stepsdone.add(b'obsmarkers')
    if not pushop.outobsmarkers:
        return
    bundle2.buildobsmarkerspart(
        bundler, obsutil.sortedmarkers(pushop.outobsmarkers)
    )
1161 1161
1162 1162
@b2partsgenerator(b'bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Uses the binary ``bookmarks`` part if advertised and not disabled by the
    ``devel.legacy.exchange`` knob, otherwise falls back to pushkey parts.
    """
    if b'bookmarks' in pushop.stepsdone:
        return
    remotecaps = bundle2.bundle2caps(pushop.remote)

    forcelegacy = b'bookmarks' in pushop.repo.ui.configlist(
        b'devel', b'legacy.exchange'
    )

    if not forcelegacy and b'bookmarks' in remotecaps:
        return _pushb2bookmarkspart(pushop, bundler)
    if b'pushkey' in remotecaps:
        return _pushb2bookmarkspushkey(pushop, bundler)
1177 1177
1178 1178
1179 1179 def _bmaction(old, new):
1180 1180 """small utility for bookmark pushing"""
1181 1181 if not old:
1182 1182 return b'export'
1183 1183 elif not new:
1184 1184 return b'delete'
1185 1185 return b'update'
1186 1186
1187 1187
def _abortonsecretctx(pushop, node, b):
    """abort if a given bookmark points to a secret changeset"""
    if not node:
        return
    if pushop.repo[node].phase() != phases.secret:
        return
    raise error.Abort(
        _(b'cannot push bookmark %s as it points to a secret changeset') % b
    )
1194 1194
1195 1195
def _pushb2bookmarkspart(pushop, bundler):
    """Encode outgoing bookmarks into a single binary ``bookmarks`` part."""
    pushop.stepsdone.add(b'bookmarks')
    if not pushop.outbookmarks:
        return

    allactions = []
    payload = []
    for name, old, new in pushop.outbookmarks:
        # refuse to expose secret changesets through a bookmark
        _abortonsecretctx(pushop, new, name)
        payload.append((name, new))
        allactions.append((name, _bmaction(old, new)))
    bundler.newpart(b'bookmarks', data=bookmod.binaryencode(payload))

    def handlereply(op):
        # the part is all-or-nothing: reaching here means every move succeeded
        ui = pushop.ui
        for name, action in allactions:
            ui.status(bookmsgmap[action][0] % name)

    return handlereply
1217 1217
1218 1218
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmark updates through bundle2 - one pushkey part per bookmark

    Returns a reply handler that reports per-bookmark results and marks
    ``pushop.bkresult`` when any update failed.
    """
    pushop.stepsdone.add(b'bookmarks')
    # (part id, bookmark name, action) triples shared with the closures below
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # invoked via pushop.pkfailcb when the server rejects one of our parts
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        _abortonsecretctx(pushop, new, book)
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'bookmarks'))
        part.addparam(b'key', enc(book))
        part.addparam(b'old', enc(hex(old)))
        part.addparam(b'new', enc(hex(new)))
        action = b'update'
        if not old:
            action = b'export'
        elif not new:
            action = b'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep[b'pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0][b'return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1

    return handlereply
1265 1265
1266 1266
@b2partsgenerator(b'pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if not pushvars:
        return

    shellvars = {}
    for raw in pushvars:
        if b'=' not in raw:
            # each entry must look like KEY=VALUE (VALUE may be empty)
            msg = (
                b"unable to parse variable '%s', should follow "
                b"'KEY=VALUE' or 'KEY=' format"
            )
            raise error.Abort(msg % raw)
        key, value = raw.split(b'=', 1)
        shellvars[key] = value

    part = bundler.newpart(b'pushvars')
    for key, value in pycompat.iteritems(shellvars):
        part.addparam(key, value, mandatory=False)
1287 1287
1288 1288
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = pushop.trmanager and pushop.ui.configbool(
        b'experimental', b'bundle2.pushback'
    )

    # create reply capability
    capsblob = bundle2.encodecaps(
        bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
    )
    bundler.newpart(b'replycaps', data=capsblob)
    replyhandlers = []
    # each registered part generator may add parts and return a reply handler
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push (only the replycaps part is present)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            with pushop.remote.commandexecutor() as e:
                reply = e.callcommand(
                    b'unbundle',
                    {
                        b'bundle': stream,
                        b'heads': [b'force'],
                        b'url': pushop.remote.url(),
                    },
                ).result()
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            # process the server's reply bundle locally
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # the remote aborted mid-bundle; relay its message (and hint)
            pushop.ui.status(_(b'remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
            raise error.Abort(_(b'push failed on remote'))
    except error.PushkeyFailed as exc:
        # delegate to the per-part failure callback when one was registered
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
1346 1346
1347 1347
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) path: builds a version-01 changegroup and sends it
    via the remote's ``unbundle`` command, storing the result in
    ``pushop.cgresult``.
    """
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable(b'unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (
        outgoing.excluded or pushop.repo.changelog.filteredrevs
    ):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(
            pushop.repo,
            outgoing,
            b'01',
            b'push',
            fastpath=True,
            bundlecaps=bundlecaps,
        )
    else:
        cg = changegroup.makechangegroup(
            pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
        )

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = [b'force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1394 1394
1395 1395
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = listkeys(pushop.remote, b'phases')
    if (
        pushop.ui.configbool(b'ui', b'_usedassubrepo')
        and remotephases  # server supports phases
        and pushop.cgresult is None  # nothing was pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {b'publishing': b'True'}
    if not remotephases:  # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get(b'publishing', False):
            # remote publishes everything: common heads become public locally
            _localphasemove(pushop, cheads)
        else:  # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if b'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add(b'phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            with pushop.remote.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': b'phases',
                        b'key': newremotehead.hex(),
                        b'old': b'%d' % phases.draft,
                        b'new': b'%d' % phases.public,
                    },
                ).result()

            if not r:
                pushop.ui.warn(
                    _(b'updating %s to public failed!\n') % newremotehead
                )
1460 1460
1461 1461
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(
            pushop.repo, pushop.trmanager.transaction(), phase, nodes
        )
        return
    # repo is not locked, do not change any phases!
    # Informs the user that phases should have been moved when
    # applicable.
    actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
    if not actualmoves:
        return
    phasestr = phases.phasenames[phase]
    pushop.ui.status(
        _(
            b'cannot lock source repo, skipping '
            b'local %s phase update\n'
        )
        % phasestr
    )
1482 1482
1483 1483
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if b'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add(b'obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug(b'try to push obsolete markers to remote\n')
    markers = obsutil.sortedmarkers(pushop.outobsmarkers)
    remotedata = obsolete._pushkeyescape(markers)
    anyfailed = False
    for key in sorted(remotedata, reverse=True):
        # reverse sort to ensure we end with dump0
        if not remote.pushkey(b'obsolete', key, b'', remotedata[key]):
            anyfailed = True
    if anyfailed:
        repo.ui.warn(_(b'failed to push some obsolete markers!\n'))
1503 1503
1504 1504
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for book, old, new in pushop.outbookmarks:
        # classify the move so the right status/warn template is used
        action = _bmaction(old, new)

        with remote.commandexecutor() as e:
            r = e.callcommand(
                b'pushkey',
                {
                    b'namespace': b'bookmarks',
                    b'key': book,
                    b'old': hex(old),
                    b'new': hex(new),
                },
            ).result()

        if r:
            ui.status(bookmsgmap[action][0] % book)
        else:
            ui.warn(bookmsgmap[action][1] % book)
            # discovery can have set the value form invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
1538 1538
1539 1539
class pulloperation(object):
    """A object that represent a single pull operation

    It purpose is to carry pull related state and very common operation.

    A new should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(
        self,
        repo,
        remote,
        heads=None,
        force=False,
        bookmarks=(),
        remotebookmarks=None,
        streamclonerequested=None,
        includepats=None,
        excludepats=None,
        depth=None,
    ):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly (names expanded eagerly at init time)
        self.explicitbookmarks = [
            repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
        ]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager (set by pull() before any step runs)
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False
        # Set of file patterns to include.
        self.includepats = includepats
        # Set of file patterns to exclude.
        self.excludepats = excludepats
        # Number of ancestor changesets to pull from each pulled head.
        self.depth = depth

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # cached: may we use bundle2 for this pull?
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # cached bundle2 capabilities advertised by the remote
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1628 1628
1629 1629
class transactionmanager(util.transactional):
    """Lazy owner of a repository transaction for an exchange operation.

    The transaction is only opened on first use; ``close``/``release`` are
    no-ops when no transaction was ever created.
    """

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs[b'source'] = self.source
            tr.hookargs[b'url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1660 1660
1661 1661
def listkeys(remote, namespace):
    """Fetch the pushkey ``namespace`` mapping from ``remote``."""
    with remote.commandexecutor() as executor:
        cmd = executor.callcommand(b'listkeys', {b'namespace': namespace})
        return cmd.result()
1665 1665
1666 1666
def _fullpullbundle2(repo, pullop):
    # The server may send a partial reply, i.e. when inlining
    # pre-computed bundles. In that case, update the common
    # set based on the results and pull another bundle.
    #
    # There are two indicators that the process is finished:
    # - no changeset has been added, or
    # - all remote heads are known locally.
    # The head check must use the unfiltered view as obsoletion
    # markers can hide heads.
    unfi = repo.unfiltered()
    unficl = unfi.changelog

    def headsofdiff(h1, h2):
        """Returns heads(h1 % h2)"""
        res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
        return {ctx.node() for ctx in res}

    def headsofunion(h1, h2):
        """Returns heads((h1 + h2) - null)"""
        res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
        return {ctx.node() for ctx in res}

    while True:
        old_heads = unficl.heads()
        clstart = len(unficl)
        _pullbundle2(pullop)
        if requirements.NARROW_REQUIREMENT in repo.requirements:
            # XXX narrow clones filter the heads on the server side during
            # XXX getbundle and result in partial replies as well.
            # XXX Disable pull bundles in this case as band aid to avoid
            # XXX extra round trips.
            break
        if clstart == len(unficl):
            # nothing was added this round: we are done
            break
        if all(unficl.hasnode(n) for n in pullop.rheads):
            # every remote head is now known locally: we are done
            break
        # grow the common set with what we just received and retry
        new_heads = headsofdiff(unficl.heads(), old_heads)
        pullop.common = headsofunion(new_heads, pullop.common)
        pullop.rheads = set(pullop.rheads) - pullop.common
1707 1707
1708 1708
def add_confirm_callback(repo, pullop):
    """ adds a finalize callback to transaction which can be used to show stats
    to user and confirm the pull before committing transaction """

    tr = pullop.trmanager.transaction()
    scmutil.registersummarycallback(
        repo, tr, txnname=b'pull', as_validator=True
    )
    # weakref so the validator closure does not keep the repo alive
    reporef = weakref.ref(repo.unfiltered())

    def prompt(tr):
        repo = reporef()
        cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
        if repo.ui.promptchoice(cm):
            # Abort's message must be bytes, not str (see commit message:
            # "errors: stop passing non-strings to Abort's constructor")
            raise error.Abort(b"user aborted")

    tr.addvalidator(b'900-pull-prompt', prompt)
1726 1726
1727 1727
def pull(
    repo,
    remote,
    heads=None,
    force=False,
    bookmarks=(),
    opargs=None,
    streamclonerequested=None,
    includepats=None,
    excludepats=None,
    depth=None,
    confirm=None,
):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.
    ``includepats`` and ``excludepats`` define explicit file patterns to
    include and exclude in storage, respectively. If not defined, narrow
    patterns from the repo instance are used, if available.
    ``depth`` is an integer indicating the DAG depth of history we're
    interested in. If defined, for each revision specified in ``heads``, we
    will fetch up to this many of its ancestors and data associated with them.
    ``confirm`` is a boolean indicating whether the pull should be confirmed
    before committing the transaction. This overrides HGPLAIN.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}

    # We allow the narrow patterns to be passed in explicitly to provide more
    # flexibility for API consumers.
    if includepats or excludepats:
        includepats = includepats or set()
        excludepats = excludepats or set()
    else:
        # fall back to the narrow patterns already configured on the repo
        includepats, excludepats = repo.narrowpats

    narrowspec.validatepatterns(includepats)
    narrowspec.validatepatterns(excludepats)

    pullop = pulloperation(
        repo,
        remote,
        heads,
        force,
        bookmarks=bookmarks,
        streamclonerequested=streamclonerequested,
        includepats=includepats,
        excludepats=excludepats,
        depth=depth,
        **pycompat.strkwargs(opargs)
    )

    # refuse to pull from a local peer whose requirements we do not support
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
    # bookmarks stored outside the store need the wlock as well
    wlock = util.nullcontextmanager()
    if not bookmod.bookmarksinstore(repo):
        wlock = repo.wlock()
    with wlock, repo.lock(), pullop.trmanager:
        if confirm or (
            repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
        ):
            add_confirm_callback(repo, pullop)

        # Use the modern wire protocol, if available.
        if remote.capable(b'command-changesetdata'):
            exchangev2.pull(pullop)
        else:
            # This should ideally be in _pullbundle2(). However, it needs to run
            # before discovery to avoid extra work.
            _maybeapplyclonebundle(pullop)
            streamclone.maybeperformlegacystreamclone(pullop)
            _pulldiscovery(pullop)
            if pullop.canusebundle2:
                _fullpullbundle2(repo, pullop)
            _pullchangeset(pullop)
            _pullphase(pullop)
            _pullbookmarks(pullop)
            _pullobsolete(pullop)

        # storing remotenames
        if repo.ui.configbool(b'experimental', b'remotenames'):
            logexchange.pullremotenames(repo, remote)

    return pullop
1838 1838
1839 1839
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1847 1847
1848 1848
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""

    def register(func):
        # a step name may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func

    return register
1866 1866
1867 1867
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1873 1873
1874 1874
@pulldiscovery(b'b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # already known, nothing to fetch
        return
    if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(
        listkeys(pullop.remote, b'bookmarks')
    )
1889 1889
1890 1890
@pulldiscovery(b'changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(
        pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
    )
    common, fetch, rheads = tmp
    has_node = pullop.repo.unfiltered().changelog.index.has_node
    if fetch and rheads:
        # If a remote heads is filtered locally, put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        for n in rheads:
            if has_node(n):
                if n not in scommon:
                    common.append(n)
        if set(rheads).issubset(set(common)):
            # every remote head is already common: nothing left to fetch
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1922 1922
1923 1923
1924 1924 def _pullbundle2(pullop):
1925 1925 """pull data using bundle2
1926 1926
1927 1927 For now, the only supported data are changegroup."""
1928 1928 kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
1929 1929
1930 1930 # make ui easier to access
1931 1931 ui = pullop.repo.ui
1932 1932
1933 1933 # At the moment we don't do stream clones over bundle2. If that is
1934 1934 # implemented then here's where the check for that will go.
1935 1935 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1936 1936
1937 1937 # declare pull perimeters
1938 1938 kwargs[b'common'] = pullop.common
1939 1939 kwargs[b'heads'] = pullop.heads or pullop.rheads
1940 1940
1941 1941 # check server supports narrow and then adding includepats and excludepats
1942 1942 servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
1943 1943 if servernarrow and pullop.includepats:
1944 1944 kwargs[b'includepats'] = pullop.includepats
1945 1945 if servernarrow and pullop.excludepats:
1946 1946 kwargs[b'excludepats'] = pullop.excludepats
1947 1947
1948 1948 if streaming:
1949 1949 kwargs[b'cg'] = False
1950 1950 kwargs[b'stream'] = True
1951 1951 pullop.stepsdone.add(b'changegroup')
1952 1952 pullop.stepsdone.add(b'phases')
1953 1953
1954 1954 else:
1955 1955 # pulling changegroup
1956 1956 pullop.stepsdone.add(b'changegroup')
1957 1957
1958 1958 kwargs[b'cg'] = pullop.fetch
1959 1959
1960 1960 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1961 1961 hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
1962 1962 if not legacyphase and hasbinaryphase:
1963 1963 kwargs[b'phases'] = True
1964 1964 pullop.stepsdone.add(b'phases')
1965 1965
1966 1966 if b'listkeys' in pullop.remotebundle2caps:
1967 1967 if b'phases' not in pullop.stepsdone:
1968 1968 kwargs[b'listkeys'] = [b'phases']
1969 1969
1970 1970 bookmarksrequested = False
1971 1971 legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
1972 1972 hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
1973 1973
1974 1974 if pullop.remotebookmarks is not None:
1975 1975 pullop.stepsdone.add(b'request-bookmarks')
1976 1976
1977 1977 if (
1978 1978 b'request-bookmarks' not in pullop.stepsdone
1979 1979 and pullop.remotebookmarks is None
1980 1980 and not legacybookmark
1981 1981 and hasbinarybook
1982 1982 ):
1983 1983 kwargs[b'bookmarks'] = True
1984 1984 bookmarksrequested = True
1985 1985
1986 1986 if b'listkeys' in pullop.remotebundle2caps:
1987 1987 if b'request-bookmarks' not in pullop.stepsdone:
1988 1988 # make sure to always includes bookmark data when migrating
1989 1989 # `hg incoming --bundle` to using this function.
1990 1990 pullop.stepsdone.add(b'request-bookmarks')
1991 1991 kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
1992 1992
1993 1993 # If this is a full pull / clone and the server supports the clone bundles
1994 1994 # feature, tell the server whether we attempted a clone bundle. The
1995 1995 # presence of this flag indicates the client supports clone bundles. This
1996 1996 # will enable the server to treat clients that support clone bundles
1997 1997 # differently from those that don't.
1998 1998 if (
1999 1999 pullop.remote.capable(b'clonebundles')
2000 2000 and pullop.heads is None
2001 2001 and list(pullop.common) == [nullid]
2002 2002 ):
2003 2003 kwargs[b'cbattempted'] = pullop.clonebundleattempted
2004 2004
2005 2005 if streaming:
2006 2006 pullop.repo.ui.status(_(b'streaming all changes\n'))
2007 2007 elif not pullop.fetch:
2008 2008 pullop.repo.ui.status(_(b"no changes found\n"))
2009 2009 pullop.cgresult = 0
2010 2010 else:
2011 2011 if pullop.heads is None and list(pullop.common) == [nullid]:
2012 2012 pullop.repo.ui.status(_(b"requesting all changes\n"))
2013 2013 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2014 2014 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
2015 2015 if obsolete.commonversion(remoteversions) is not None:
2016 2016 kwargs[b'obsmarkers'] = True
2017 2017 pullop.stepsdone.add(b'obsmarkers')
2018 2018 _pullbundle2extraprepare(pullop, kwargs)
2019 2019
2020 2020 with pullop.remote.commandexecutor() as e:
2021 2021 args = dict(kwargs)
2022 2022 args[b'source'] = b'pull'
2023 2023 bundle = e.callcommand(b'getbundle', args).result()
2024 2024
2025 2025 try:
2026 2026 op = bundle2.bundleoperation(
2027 2027 pullop.repo, pullop.gettransaction, source=b'pull'
2028 2028 )
2029 2029 op.modes[b'bookmarks'] = b'records'
2030 2030 bundle2.processbundle(pullop.repo, bundle, op=op)
2031 2031 except bundle2.AbortFromPart as exc:
2032 2032 pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
2033 2033 raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
2034 2034 except error.BundleValueError as exc:
2035 2035 raise error.Abort(_(b'missing support for %s') % exc)
2036 2036
2037 2037 if pullop.fetch:
2038 2038 pullop.cgresult = bundle2.combinechangegroupresults(op)
2039 2039
2040 2040 # processing phases change
2041 2041 for namespace, value in op.records[b'listkeys']:
2042 2042 if namespace == b'phases':
2043 2043 _pullapplyphases(pullop, value)
2044 2044
2045 2045 # processing bookmark update
2046 2046 if bookmarksrequested:
2047 2047 books = {}
2048 2048 for record in op.records[b'bookmarks']:
2049 2049 books[record[b'bookmark']] = record[b"node"]
2050 2050 pullop.remotebookmarks = books
2051 2051 else:
2052 2052 for namespace, value in op.records[b'listkeys']:
2053 2053 if namespace == b'bookmarks':
2054 2054 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
2055 2055
2056 2056 # bookmark data were either already there or pulled in the bundle
2057 2057 if pullop.remotebookmarks is not None:
2058 2058 _pullbookmarks(pullop)
2059 2059
2060 2060
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call

    Called by _pullbundle2 with the pull operation and the ``getbundle``
    argument dict right before the command is issued. Extensions may wrap
    this to inject extra arguments. The default implementation is a no-op.
    """
2064 2064
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Legacy (non-bundle2) changegroup pull. Picks the most capable wire
    command the remote supports (getbundle > changegroup(subset)), applies
    the result and records the outcome in ``pullop.cgresult``.
    """
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if b'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'changegroup')
    if not pullop.fetch:
        # nothing to pull: report and record a neutral result
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        # full pull (no common ancestors, no explicit heads)
        pullop.repo.ui.status(_(b"requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable(b'getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle(
            b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
        )
    elif pullop.heads is None:
        # old remote, full pull: the basic 'changegroup' command suffices
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
            ).result()

    elif not pullop.remote.capable(b'changegroupsubset'):
        # partial pull requested but the remote cannot compute subsets
        raise error.Abort(
            _(
                b"partial pull cannot be done because "
                b"other repository doesn't support "
                b"changegroupsubset."
            )
        )
    else:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroupsubset',
                {
                    b'bases': pullop.fetch,
                    b'heads': pullop.heads,
                    b'source': b'pull',
                },
            ).result()

    bundleop = bundle2.applybundle(
        pullop.repo, cg, tr, b'pull', pullop.remote.url()
    )
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
2119 2119
def _pullphase(pullop):
    """Fetch phase data from the remote and apply it locally.

    Skipped entirely when the 'phases' step was already handled (e.g. by
    a bundle2 pull).
    """
    if b'phases' not in pullop.stepsdone:
        # query the remote through the pushkey protocol and hand the
        # result over to the shared application logic
        _pullapplyphases(pullop, listkeys(pullop.remote, b'phases'))
2126 2126
2127 2127
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    ``remotephases`` is the pushkey-style mapping returned by the remote's
    'phases' namespace. Advances local phase boundaries (public/draft) for
    the pulled subset, opening the pull transaction lazily only when a
    boundary actually needs to move.
    """
    if b'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'phases')
    publishing = bool(remotephases.get(b'publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(
            pullop.repo, pullop.pulledsubset, remotephases
        )
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    # work on the unfiltered repo so phase lookups see every revision
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.index.get_rev
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
2162 2162
2163 2163
def _pullbookmarks(pullop):
    """Update the local bookmarks from the remote bookmark state."""
    if b'bookmarks' not in pullop.stepsdone:
        pullop.stepsdone.add(b'bookmarks')
        localrepo = pullop.repo
        # delegate the actual reconciliation to the bookmarks module
        bookmod.updatefromremote(
            localrepo.ui,
            localrepo,
            pullop.remotebookmarks,
            pullop.remote.url(),
            pullop.gettransaction,
            explicit=pullop.explicitbookmarks,
        )
2179 2179
2180 2180
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    `pullop.gettransaction` returns the pull transaction, creating one if
    necessary. The transaction (or None) is returned so the caller knows
    whether a new one was created.

    Exists mostly to allow overriding for experimentation purpose"""
    if b'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'obsmarkers')
    transaction = None
    repo = pullop.repo
    if obsolete.isenabled(repo, obsolete.exchangeopt):
        repo.ui.debug(b'fetching remote obsolete markers\n')
        remoteobs = listkeys(pullop.remote, b'obsolete')
        if b'dump0' in remoteobs:
            transaction = pullop.gettransaction()
            # gather markers from every 'dump*' key, iterating the keys in
            # reverse-sorted order as the original exchange protocol does
            allmarkers = []
            for key in sorted(remoteobs, reverse=True):
                if not key.startswith(b'dump'):
                    continue
                blob = util.b85decode(remoteobs[key])
                _version, decoded = obsolete._readmarkers(blob)
                allmarkers.extend(decoded)
            if allmarkers:
                repo.obsstore.add(transaction, allmarkers)
            repo.invalidatevolatilesets()
    return transaction
2208 2208
2209 2209
def applynarrowacl(repo, kwargs):
    """Apply narrow fetch access control.

    Rewrites the named arguments for getbundle wire protocol commands so
    the data a client may fetch is restricted by the per-user
    include/exclude rules from the narrow ACL configuration section.
    """
    ui = repo.ui
    # TODO this assumes existence of HTTP and is a layering violation.
    username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())

    def configuredpats(suffix):
        # per-user setting, falling back to the section-wide default
        return ui.configlist(
            _NARROWACL_SECTION,
            username + suffix,
            ui.configlist(_NARROWACL_SECTION, b'default' + suffix),
        )

    user_includes = configuredpats(b'.includes')
    user_excludes = configuredpats(b'.excludes')
    if not user_includes:
        raise error.Abort(
            _(b"%s configuration for user %s is empty")
            % (_NARROWACL_SECTION, username)
        )

    def aspathpats(pats):
        # b'*' grants everything; anything else becomes an anchored path
        return [b'path:.' if p == b'*' else b'path:' + p for p in pats]

    user_includes = aspathpats(user_includes)
    user_excludes = aspathpats(user_excludes)

    req_includes = set(kwargs.get('includepats', []))
    req_excludes = set(kwargs.get('excludepats', []))

    req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
        req_includes, req_excludes, user_includes, user_excludes
    )

    if invalid_includes:
        raise error.Abort(
            _(b"The following includes are not accessible for %s: %s")
            % (username, stringutil.pprint(invalid_includes))
        )

    new_args = dict(kwargs)
    new_args['narrow'] = True
    new_args['narrow_acl'] = True
    new_args['includepats'] = req_includes
    if req_excludes:
        new_args['excludepats'] = req_excludes

    return new_args
2264 2264
2265 2265
def _computeellipsis(repo, common, heads, known, match, depth=None):
    """Compute the shape of a narrowed DAG.

    Args:
      repo: The repository we're transferring.
      common: The roots of the DAG range we're transferring.
        May be just [nullid], which means all ancestors of heads.
      heads: The heads of the DAG range we're transferring.
      known: A set of revs the client already has as full (non-ellipsis)
        nodes; they must keep correct parent links.
      match: The narrowmatcher that allows us to identify relevant changes.
      depth: If not None, only consider nodes to be full nodes if they are at
        most depth changesets away from one of heads.

    Returns:
      A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:

        visitnodes: The list of nodes (either full or ellipsis) which
          need to be sent to the client.
        relevant_nodes: The set of changelog nodes which change a file inside
          the narrowspec. The client needs these as non-ellipsis nodes.
        ellipsisroots: A dict of {rev: parents} that is used in
          narrowchangegroup to produce ellipsis nodes with the
          correct parents.
    """
    cl = repo.changelog
    mfl = repo.manifestlog

    clrev = cl.rev

    commonrevs = {clrev(n) for n in common} | {nullrev}
    headsrevs = {clrev(n) for n in heads}

    if depth:
        # distance (in changesets) from the nearest head, filled lazily below
        revdepth = {h: 0 for h in headsrevs}

    # rev -> set of ellipsis heads it is (transitively) an ancestor of
    ellipsisheads = collections.defaultdict(set)
    # ellipsis head rev -> the (at most 2) roots it will be linked to
    ellipsisroots = collections.defaultdict(set)

    def addroot(head, curchange):
        """Add a root to an ellipsis head, splitting heads with 3 roots."""
        ellipsisroots[head].add(curchange)
        # Recursively split ellipsis heads with 3 roots by finding the
        # roots' youngest common descendant which is an elided merge commit.
        # That descendant takes 2 of the 3 roots as its own, and becomes a
        # root of the head.
        while len(ellipsisroots[head]) > 2:
            child, roots = splithead(head)
            splitroots(head, child, roots)
            head = child  # Recurse in case we just added a 3rd root

    def splitroots(head, child, roots):
        # move ``roots`` from ``head`` onto the intermediate ``child``
        ellipsisroots[head].difference_update(roots)
        ellipsisroots[head].add(child)
        ellipsisroots[child].update(roots)
        ellipsisroots[child].discard(child)

    def splithead(head):
        """Pick an elided merge between two of head's three roots.

        Returns (child, (root1, root2)) where child adopts the two roots.
        """
        r1, r2, r3 = sorted(ellipsisroots[head])
        for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
            mid = repo.revs(
                b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
            )
            for j in mid:
                if j == nr2:
                    return nr2, (nr1, nr2)
                if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
                    return j, (nr1, nr2)
        raise error.Abort(
            _(
                b'Failed to split up ellipsis node! head: %d, '
                b'roots: %d %d %d'
            )
            % (head, r1, r2, r3)
        )

    missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
    visit = reversed(missing)
    relevant_nodes = set()
    visitnodes = [cl.node(m) for m in missing]
    required = set(headsrevs) | known
    for rev in visit:
        # NOTE: ``clrev`` is rebound here from the cl.rev function above to
        # the changelogrevision object for this rev
        clrev = cl.changelogrevision(rev)
        ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
        if depth is not None:
            curdepth = revdepth[rev]
            for p in ps:
                revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
        needed = False
        shallow_enough = depth is None or revdepth[rev] <= depth
        if shallow_enough:
            curmf = mfl[clrev.manifest].read()
            if ps:
                # We choose to not trust the changed files list in
                # changesets because it's not always correct. TODO: could
                # we trust it for the non-merge case?
                p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
                needed = bool(curmf.diff(p1mf, match))
                if not needed and len(ps) > 1:
                    # For merge changes, the list of changed files is not
                    # helpful, since we need to emit the merge if a file
                    # in the narrow spec has changed on either side of the
                    # merge. As a result, we do a manifest diff to check.
                    p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
                    needed = bool(curmf.diff(p2mf, match))
            else:
                # For a root node, we need to include the node if any
                # files in the node match the narrowspec.
                needed = any(curmf.walk(match))

        if needed:
            # full node: it roots every ellipsis head reachable from it,
            # and its parents become required
            for head in ellipsisheads[rev]:
                addroot(head, rev)
            for p in ps:
                required.add(p)
            relevant_nodes.add(cl.node(rev))
        else:
            if not ps:
                ps = [nullrev]
            if rev in required:
                # required-but-irrelevant rev becomes an ellipsis node
                for head in ellipsisheads[rev]:
                    addroot(head, rev)
                for p in ps:
                    ellipsisheads[p].add(rev)
            else:
                # skipped entirely: propagate its ellipsis heads upward
                for p in ps:
                    ellipsisheads[p] |= ellipsisheads[rev]

    # add common changesets as roots of their reachable ellipsis heads
    for c in commonrevs:
        for head in ellipsisheads[c]:
            addroot(head, c)
    return visitnodes, relevant_nodes, ellipsisroots
2397 2397
2398 2398
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    # advertise both the HG20 format and the url-quoted capability blob
    return {b'HG20', b'bundle2=' + urlreq.quote(capsblob)}
2405 2405
2406 2406
# List of names of steps to perform for a bundle2 for getbundle, order matters.
# Populated by the getbundle2partsgenerator decorator below.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
2414 2414
2415 2415
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in the step -> function mapping and
    added to the ordered list of steps (at position ``idx`` when given,
    otherwise appended). Beware that decorated functions will be added in
    order (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""

    def register(func):
        # each step name may only be registered once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func

    return register
2436 2436
2437 2437
def bundle2requested(bundlecaps):
    """Tell whether the client capabilities ask for a bundle2 stream."""
    if bundlecaps is None:
        return False
    # any capability token of the form HG2x selects bundle2
    return any(cap.startswith(b'HG2') for cap in bundlecaps)
2442 2442
2443 2443
def getbundlechunks(
    repo, source, heads=None, common=None, bundlecaps=None, **kwargs
):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    # keyword args arrive str-keyed (Python 3); normalize to bytes keys
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get(b'cg', True):
            raise ValueError(
                _(b'request for bundle10 must include changegroup')
            )

        # bundle10 understands nothing beyond a plain changegroup
        if kwargs:
            raise ValueError(
                _(b'unsupported getbundle arguments: %s')
                % b', '.join(sorted(kwargs.keys()))
            )
        outgoing = _computeoutgoing(repo, heads, common)
        info[b'bundleversion'] = 1
        return (
            info,
            changegroup.makestream(
                repo, outgoing, b'01', source, bundlecaps=bundlecaps
            ),
        )

    # bundle20 case
    info[b'bundleversion'] = 2
    # decode the client's url-quoted bundle2 capability blob
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith(b'bundle2='):
            blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs[b'heads'] = heads
    kwargs[b'common'] = common

    # run every registered part generator, in registration order;
    # kwargs go back to str keys for the ** call
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(
            bundler,
            repo,
            source,
            bundlecaps=bundlecaps,
            b2caps=b2caps,
            **pycompat.strkwargs(kwargs)
        )

    info[b'prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()
2505 2505
2506 2506
@getbundle2partsgenerator(b'stream2')
def _getbundlestream2(bundler, repo, *args, **kwargs):
    """add a 'stream2' part when a stream clone was requested

    Simply delegates to bundle2.addpartbundlestream2(), forwarding the
    getbundle keyword arguments; positional *args are accepted but unused.
    """
    return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2510 2510
2511 2511
@getbundle2partsgenerator(b'changegroup')
def _getbundlechangegrouppart(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """add a changegroup part to the requested bundle

    Negotiates the highest mutually supported changegroup version, builds
    the (possibly narrowed) changegroup stream and attaches it along with
    its metadata parameters.
    """
    if not kwargs.get('cg', True) or not b2caps:
        return

    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [
            v
            for v in cgversions
            if v in changegroup.supportedoutgoingversions(repo)
        ]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)

    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        return

    if kwargs.get('narrow', False):
        include = sorted(filter(bool, kwargs.get('includepats', [])))
        exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
        matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
    else:
        matcher = None

    cgstream = changegroup.makestream(
        repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
    )

    part = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        part.addparam(b'version', version)

    part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)

    if scmutil.istreemanifest(repo):
        part.addparam(b'treemanifest', b'1')

    if b'exp-sidedata-flag' in repo.requirements:
        part.addparam(b'exp-sidedata', b'1')

    # NOTE: include/exclude are only bound inside the narrow branch above;
    # the 'narrow' check must stay first so short-circuiting protects them
    if (
        kwargs.get('narrow', False)
        and kwargs.get('narrow_acl', False)
        and (include or exclude)
    ):
        # this is mandatory because otherwise ACL clients won't work
        narrowspecpart = bundler.newpart(b'Narrow:responsespec')
        narrowspecpart.data = b'%s\0%s' % (
            b'\n'.join(include),
            b'\n'.join(exclude),
        )
2577 2577
2578 2578
@getbundle2partsgenerator(b'bookmarks')
def _getbundlebookmarkpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add a bookmark part to the requested bundle"""
    if kwargs.get('bookmarks', False):
        if not b2caps or b'bookmarks' not in b2caps:
            raise error.Abort(_(b'no common bookmarks exchange method'))
        # binary-encode the full bookmark set; an empty payload means
        # there is nothing to transfer, so no part is emitted at all
        payload = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
        if payload:
            bundler.newpart(b'bookmarks', data=payload)
2592 2592
2593 2593
@getbundle2partsgenerator(b'listkeys')
def _getbundlelistkeysparts(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one part per requested pushkey namespace
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart(b'listkeys')
        part.addparam(b'namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
2605 2605
2606 2606
@getbundle2partsgenerator(b'obsmarkers')
def _getbundleobsmarkerpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # only markers relevant to the ancestors of the requested heads matter
    subset = [c.node() for c in repo.set(b'::%ln', heads)]
    relevant = obsutil.sortedmarkers(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, relevant)
2619 2619
2620 2620
@getbundle2partsgenerator(b'phases')
def _getbundlephasespart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add phase heads part to the requested bundle

    Groups the requested heads by their local phase and emits a binary
    'phase-heads' part so the client can set phases without a separate
    listkeys round trip.
    """
    if kwargs.get('phases', False):
        if not b2caps or b'heads' not in b2caps.get(b'phases'):
            raise error.Abort(_(b'no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            # publishing server: everything it serves is public
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phase for now)
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = b'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = {
            phase: sorted(headsbyphase[phase]) for phase in phases.allphases
        }

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart(b'phase-heads', data=phasedata)
2668 2668
2669 2669
@getbundle2partsgenerator(b'hgtagsfnodes')
def _getbundletagsfnodes(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Only send when changesets are being exchanged AND the client
    # advertises support for the part.
    exchangingcg = kwargs.get('cg', True)
    clientsupport = bool(b2caps) and b'hgtagsfnodes' in b2caps
    if exchangingcg and clientsupport:
        bundle2.addparttagsfnodescache(
            repo, bundler, _computeoutgoing(repo, heads, common)
        )
2696 2696
2697 2697
@getbundle2partsgenerator(b'cache:rev-branch-cache')
def _getbundlerevbranchcache(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """Transfer the rev-branch-cache mapping

    The payload is a series of data related to each branch

    1) branch name length
    2) number of open heads
    3) number of closed heads
    4) open heads nodes
    5) closed heads nodes
    """
    # Only send when:
    # - changesets are being exchanged,
    # - the client supports it,
    # - narrow bundle isn't in play (not currently compatible).
    sendable = (
        kwargs.get('cg', True)
        and b2caps
        and b'rev-branch-cache' in b2caps
        and not kwargs.get('narrow', False)
        and not repo.ui.has_section(_NARROWACL_SECTION)
    )
    if sendable:
        bundle2.addpartrevbranchcache(
            repo, bundler, _computeoutgoing(repo, heads, common)
        )
2734 2734
2735 2735
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    localheads = repo.heads()
    digest = hashutil.sha1(b''.join(sorted(localheads))).digest()
    # the client may send b'force', the literal head list, or a sha1 digest
    acceptable = (
        their_heads == [b'force']
        or their_heads == localheads
        or their_heads == [b'hashed', digest]
    )
    if not acceptable:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced(
            b'repository changed while %s - please try again' % context
        )
2753 2753
2754 2754
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    # set to a callable when server output must be replayed/salvaged
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool(
        b'experimental', b'bundle2-output-capture'
    )
    if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, b'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = b"\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:

                def gettransaction():
                    # lazily take locks and open the transaction on first use
                    if not lockandtr[2]:
                        if not bookmod.bookmarksinstore(repo):
                            lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs[b'source'] = source
                        lockandtr[2].hookargs[b'url'] = url
                        lockandtr[2].hookargs[b'bundle2'] = b'1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool(
                    b'experimental', b'bundle2lazylocking'
                ):
                    gettransaction()

                op = bundle2.bundleoperation(
                    repo,
                    gettransaction,
                    captureoutput=captureoutput,
                    source=b'push',
                )
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        # forward captured server output in the reply bundle
                        repo.ui.pushbuffer(error=True, subproc=True)

                        def recordout(output):
                            r.newpart(b'output', data=output, mandatory=False)

                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    # salvage already-generated output parts so the client
                    # still sees them despite the failure
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()

                    def recordout(output):
                        part = bundle2.bundlepart(
                            b'output', data=output, mandatory=False
                        )
                        parts.append(part)

                raise
    finally:
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
2842 2842
2843 2843
def _maybeapplyclonebundle(pullop):
    """Fetch and apply a clone bundle advertised by the remote, when safe.

    Does nothing unless clone bundles are enabled, the local repository is
    empty, no specific heads were requested, and the remote advertises the
    ``clonebundles`` capability.
    """
    repo = pullop.repo
    remote = pullop.remote

    # Bail out early whenever a clone bundle is not applicable.
    if not repo.ui.configbool(b'ui', b'clonebundles'):
        return
    if len(repo):
        # Only run if local repo is empty.
        return
    if pullop.heads:
        return
    if not remote.capable(b'clonebundles'):
        return

    with remote.commandexecutor() as executor:
        manifest = executor.callcommand(b'clonebundles', {}).result()

    # Issuing the wire protocol command is good enough to record the attempt.
    pullop.clonebundleattempted = True

    candidates = parseclonebundlesmanifest(repo, manifest)
    if not candidates:
        repo.ui.note(
            _(
                b'no clone bundles available on remote; '
                b'falling back to regular clone\n'
            )
        )
        return

    candidates = filterclonebundleentries(
        repo, candidates, streamclonerequested=pullop.streamclonerequested
    )

    if not candidates:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(
            _(
                b'no compatible clone bundles available on server; '
                b'falling back to regular clone\n'
            )
        )
        repo.ui.warn(
            _(b'(you may want to report this to the server operator)\n')
        )
        return

    candidates = sortclonebundleentries(repo.ui, candidates)

    bundleurl = candidates[0][b'URL']
    repo.ui.status(_(b'applying clone bundle from %s\n') % bundleurl)
    if trypullbundlefromurl(repo.ui, repo, bundleurl):
        repo.ui.status(_(b'finished applying clone bundle\n'))
        return
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of clients flooding
    # a server that was expecting expensive clone load to be offloaded.
    if repo.ui.configbool(b'ui', b'clonebundlefallback'):
        repo.ui.warn(_(b'falling back to normal clone\n'))
    else:
        raise error.Abort(
            _(b'error applying bundle'),
            hint=_(
                b'if this error persists, consider contacting '
                b'the server operator or disable clone '
                b'bundles via '
                b'"--config ui.clonebundles=false"'
            ),
        )
2924 2924
2925 2925
def parseclonebundlesmanifest(repo, s):
    """Parse the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for manifestline in s.splitlines():
        fields = manifestline.split()
        if not fields:
            # Skip blank lines.
            continue
        attrs = {b'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = map(urlreq.unquote, rawattr.split(b'=', 1))
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == b'BUNDLESPEC':
                try:
                    bundlespec = parsebundlespec(repo, value)
                    attrs[b'COMPRESSION'] = bundlespec.compression
                    attrs[b'VERSION'] = bundlespec.version
                except (
                    error.InvalidBundleSpecification,
                    error.UnsupportedBundleSpecification,
                ):
                    # Ignore malformed or unknown specs; the raw attribute
                    # value is still recorded above.
                    pass

        entries.append(attrs)

    return entries
2960 2960
2961 2961
def isstreamclonespec(bundlespec):
    """Report whether *bundlespec* describes a stream clone bundle."""
    # Both stream clone flavors are uncompressed on the wire.
    if bundlespec.wirecompression != b'UN':
        return False

    # Stream clone v1.
    if bundlespec.wireversion == b's1':
        return True

    # Stream clone v2.
    return bool(
        bundlespec.wireversion == b'02'
        and bundlespec.contentopts.get(b'streamv2')
    )
2976 2976
2977 2977
def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    newentries = []
    for entry in entries:
        spec = entry.get(b'BUNDLESPEC')
        if spec:
            try:
                # strict=True so malformed specs raise instead of being
                # silently accepted.
                bundlespec = parsebundlespec(repo, spec, strict=True)

                # If a stream clone was requested, filter out non-streamclone
                # entries.
                if streamclonerequested and not isstreamclonespec(bundlespec):
                    repo.ui.debug(
                        b'filtering %s because not a stream clone\n'
                        % entry[b'URL']
                    )
                    continue

            except error.InvalidBundleSpecification as e:
                repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug(
                    b'filtering %s because unsupported bundle '
                    b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
                )
                continue
        # If we don't have a spec and requested a stream clone, we don't know
        # what the entry is so don't attempt to apply it.
        elif streamclonerequested:
            repo.ui.debug(
                b'filtering %s because cannot determine if a stream '
                b'clone bundle\n' % entry[b'URL']
            )
            continue

        # Skip entries requiring Server Name Indication when the client's
        # TLS stack cannot provide it.
        if b'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug(
                b'filtering %s because SNI not supported\n' % entry[b'URL']
            )
            continue

        if b'REQUIREDRAM' in entry:
            try:
                requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
            except error.ParseError:
                # An unparseable size requirement disqualifies the entry.
                repo.ui.debug(
                    b'filtering %s due to a bad REQUIREDRAM attribute\n'
                    % entry[b'URL']
                )
                continue
            actualram = repo.ui.estimatememory()
            # Reject entries that would need more than ~2/3 of the estimated
            # system memory (when an estimate is available).
            if actualram is not None and actualram * 0.66 < requiredram:
                repo.ui.debug(
                    b'filtering %s as it needs more than 2/3 of system memory\n'
                    % entry[b'URL']
                )
                continue

        newentries.append(entry)

    return newentries
3048 3048
3049 3049
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk the preferences in order; the first attribute that can
        # distinguish the two entries decides the ordering.
        for prefkey, prefvalue in self.prefers:
            ours = self.value.get(prefkey)
            theirs = other.value.get(prefkey)

            # An exact match beats a missing attribute, in either direction.
            if theirs is None and ours is not None and ours == prefvalue:
                return -1
            if ours is None and theirs is not None and theirs == prefvalue:
                return 1

            # Missing-on-both(-or-one) or identical values cannot
            # discriminate; fall through to the next preference.
            if ours is None or theirs is None or ours == theirs:
                continue

            # Both present and different: an exact match wins.
            if ours == prefvalue:
                return -1
            if theirs == prefvalue:
                return 1

        # If we got here we couldn't sort by attributes and prefers. Fall
        # back to index order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
3114 3114
def sortclonebundleentries(ui, entries):
    """Order manifest *entries* by the user's ``ui.clonebundleprefers``."""
    prefers = ui.configlist(b'ui', b'clonebundleprefers')
    if not prefers:
        # No preferences configured: preserve the manifest order.
        return list(entries)

    def _split(p):
        # Each preference must be of the form key=value.
        if b'=' not in p:
            hint = _(b"each comma separated item should be key=value pairs")
            raise error.Abort(
                _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
            )
        return p.split(b'=', 1)

    keyvalues = [_split(p) for p in prefers]

    # Decorate with a rich-comparison wrapper, sort, then undecorate.
    decorated = sorted(clonebundleentry(v, keyvalues) for v in entries)
    return [item.value for item in decorated]
3132 3132
3133 3133
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Returns True when the bundle was fetched and applied, and False when
    fetching failed with an HTTP or URL error (a warning is emitted in
    that case).
    """
    with repo.lock(), repo.transaction(b'bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            cg = readbundle(ui, fh, b'stream')

            if isinstance(cg, streamclone.streamcloneapplier):
                # Stream clone bundles are applied directly, outside the
                # changegroup/bundle2 machinery.
                cg.apply(repo)
            else:
                bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(
                _(b'HTTP error fetching bundle: %s\n')
                % stringutil.forcebytestr(e)
            )
        except urlerr.urlerror as e:
            ui.warn(
                _(b'error fetching bundle: %s\n')
                % stringutil.forcebytestr(e.reason)
            )

    return False
@@ -1,20 +1,20 b''
1 1 # extension to emulate interrupting filemerge._filemerge
2 2
3 3 from __future__ import absolute_import
4 4
5 5 from mercurial import (
6 6 error,
7 7 extensions,
8 8 filemerge,
9 9 )
10 10
11 11
def failfilemerge(
    filemergefn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
):
    # Emulate the user interrupting (Ctrl-C) before the wrapped
    # filemerge._filemerge ever runs.
    raise error.Abort(b"^C")
    # NOTE(review): unreachable after the raise above; shows what the
    # un-interrupted call would have been.
    return filemergefn(premerge, repo, mynode, orig, fcd, fco, fca, labels)
17 17
18 18
19 19 def extsetup(ui):
20 20 extensions.wrapfunction(filemerge, '_filemerge', failfilemerge)
@@ -1,1236 +1,1237 b''
1 1 This test is dedicated to test the bundle2 container format
2 2
3 3 It tests multiple existing parts to exercise different features of the container. You
4 4 probably do not need to touch this test unless you change the binary encoding
5 5 of the bundle2 format itself.
6 6
7 7 Create an extension to test bundle2 API
8 8
9 9 $ cat > bundle2.py << EOF
10 10 > """A small extension to test bundle2 implementation
11 11 >
12 12 > This extension allows detailed testing of the various bundle2 API and
13 13 > behaviors.
14 14 > """
15 15 > import gc
16 16 > import os
17 17 > import sys
18 18 > from mercurial import util
19 19 > from mercurial import bundle2
20 20 > from mercurial import scmutil
21 21 > from mercurial import discovery
22 22 > from mercurial import changegroup
23 23 > from mercurial import error
24 24 > from mercurial import obsolete
25 > from mercurial import pycompat
25 26 > from mercurial import registrar
26 27 > from mercurial.utils import procutil
27 28 >
28 29 >
29 30 > try:
30 31 > import msvcrt
31 32 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
32 33 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
33 34 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
34 35 > except ImportError:
35 36 > pass
36 37 >
37 38 > cmdtable = {}
38 39 > command = registrar.command(cmdtable)
39 40 >
40 41 > ELEPHANTSSONG = b"""Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
41 42 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
42 43 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
43 44 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
44 45 >
45 46 > @bundle2.parthandler(b'test:song')
46 47 > def songhandler(op, part):
47 48 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
48 49 > op.ui.write(b'The choir starts singing:\n')
49 50 > verses = 0
50 51 > for line in part.read().split(b'\n'):
51 52 > op.ui.write(b' %s\n' % line)
52 53 > verses += 1
53 54 > op.records.add(b'song', {b'verses': verses})
54 55 >
55 56 > @bundle2.parthandler(b'test:ping')
56 57 > def pinghandler(op, part):
57 58 > op.ui.write(b'received ping request (id %i)\n' % part.id)
58 59 > if op.reply is not None and b'ping-pong' in op.reply.capabilities:
59 60 > op.ui.write_err(b'replying to ping request (id %i)\n' % part.id)
60 61 > op.reply.newpart(b'test:pong', [(b'in-reply-to', b'%d' % part.id)],
61 62 > mandatory=False)
62 63 >
63 64 > @bundle2.parthandler(b'test:debugreply')
64 65 > def debugreply(op, part):
65 66 > """print data about the capacity of the bundle reply"""
66 67 > if op.reply is None:
67 68 > op.ui.write(b'debugreply: no reply\n')
68 69 > else:
69 70 > op.ui.write(b'debugreply: capabilities:\n')
70 71 > for cap in sorted(op.reply.capabilities):
71 72 > op.ui.write(b"debugreply: '%s'\n" % cap)
72 73 > for val in op.reply.capabilities[cap]:
73 74 > op.ui.write(b"debugreply: '%s'\n" % val)
74 75 >
75 76 > @command(b'bundle2',
76 77 > [(b'', b'param', [], b'stream level parameter'),
77 78 > (b'', b'unknown', False, b'include an unknown mandatory part in the bundle'),
78 79 > (b'', b'unknownparams', False, b'include an unknown part parameters in the bundle'),
79 80 > (b'', b'parts', False, b'include some arbitrary parts to the bundle'),
80 81 > (b'', b'reply', False, b'produce a reply bundle'),
81 82 > (b'', b'pushrace', False, b'includes a check:head part with unknown nodes'),
82 83 > (b'', b'genraise', False, b'includes a part that raise an exception during generation'),
83 84 > (b'', b'timeout', False, b'emulate a timeout during bundle generation'),
84 85 > (b'r', b'rev', [], b'includes those changeset in the bundle'),
85 86 > (b'', b'compress', b'', b'compress the stream'),
86 87 > ],
87 88 > b'[OUTPUTFILE]')
88 89 > def cmdbundle2(ui, repo, path=None, **opts):
89 90 > """write a bundle2 container on standard output"""
90 91 > bundler = bundle2.bundle20(ui)
91 92 > for p in opts['param']:
92 93 > p = p.split(b'=', 1)
93 94 > try:
94 95 > bundler.addparam(*p)
95 96 > except error.ProgrammingError as exc:
96 97 > raise error.Abort(b'%s' % exc)
97 98 >
98 99 > if opts['compress']:
99 100 > bundler.setcompression(opts['compress'])
100 101 >
101 102 > if opts['reply']:
102 103 > capsstring = b'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
103 104 > bundler.newpart(b'replycaps', data=capsstring)
104 105 >
105 106 > if opts['pushrace']:
106 107 > # also serves to test the assignment of data outside of init
107 108 > part = bundler.newpart(b'check:heads')
108 109 > part.data = b'01234567890123456789'
109 110 >
110 111 > revs = opts['rev']
111 112 > if 'rev' in opts:
112 113 > revs = scmutil.revrange(repo, opts['rev'])
113 114 > if revs:
114 115 > # very crude version of a changegroup part creation
115 116 > bundled = repo.revs('%ld::%ld', revs, revs)
116 117 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
117 118 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
118 119 > outgoing = discovery.outgoing(repo, headcommon, headmissing)
119 120 > cg = changegroup.makechangegroup(repo, outgoing, b'01',
120 121 > b'test:bundle2')
121 122 > bundler.newpart(b'changegroup', data=cg.getchunks(),
122 123 > mandatory=False)
123 124 >
124 125 > if opts['parts']:
125 126 > bundler.newpart(b'test:empty', mandatory=False)
126 127 > # add a second one to make sure we handle multiple parts
127 128 > bundler.newpart(b'test:empty', mandatory=False)
128 129 > bundler.newpart(b'test:song', data=ELEPHANTSSONG, mandatory=False)
129 130 > bundler.newpart(b'test:debugreply', mandatory=False)
130 131 > mathpart = bundler.newpart(b'test:math')
131 132 > mathpart.addparam(b'pi', b'3.14')
132 133 > mathpart.addparam(b'e', b'2.72')
133 134 > mathpart.addparam(b'cooking', b'raw', mandatory=False)
134 135 > mathpart.data = b'42'
135 136 > mathpart.mandatory = False
136 137 > # advisory known part with unknown mandatory param
137 138 > bundler.newpart(b'test:song', [(b'randomparam', b'')], mandatory=False)
138 139 > if opts['unknown']:
139 140 > bundler.newpart(b'test:unknown', data=b'some random content')
140 141 > if opts['unknownparams']:
141 142 > bundler.newpart(b'test:song', [(b'randomparams', b'')])
142 143 > if opts['parts']:
143 144 > bundler.newpart(b'test:ping', mandatory=False)
144 145 > if opts['genraise']:
145 146 > def genraise():
146 147 > yield b'first line\n'
147 148 > raise RuntimeError('Someone set up us the bomb!')
148 149 > bundler.newpart(b'output', data=genraise(), mandatory=False)
149 150 >
150 151 > if path is None:
151 152 > file = procutil.stdout
152 153 > else:
153 154 > file = open(path, 'wb')
154 155 >
155 156 > if opts['timeout']:
156 157 > bundler.newpart(b'test:song', data=ELEPHANTSSONG, mandatory=False)
157 158 > for idx, junk in enumerate(bundler.getchunks()):
158 159 > ui.write(b'%d chunk\n' % idx)
159 160 > if idx > 4:
160 161 > # This throws a GeneratorExit inside the generator, which
161 162 > # can cause problems if the exception-recovery code is
162 163 > # too zealous. It's important for this test that the break
163 164 > # occur while we're in the middle of a part.
164 165 > break
165 166 > gc.collect()
166 167 > ui.write(b'fake timeout complete.\n')
167 168 > return
168 169 > try:
169 170 > for chunk in bundler.getchunks():
170 171 > file.write(chunk)
171 172 > except RuntimeError as exc:
172 > raise error.Abort(exc)
173 > raise error.Abort(pycompat.bytestr(exc))
173 174 > finally:
174 175 > file.flush()
175 176 >
176 177 > @command(b'unbundle2', [], b'')
177 178 > def cmdunbundle2(ui, repo, replypath=None):
178 179 > """process a bundle2 stream from stdin on the current repo"""
179 180 > try:
180 181 > tr = None
181 182 > lock = repo.lock()
182 183 > tr = repo.transaction(b'processbundle')
183 184 > try:
184 185 > unbundler = bundle2.getunbundler(ui, procutil.stdin)
185 186 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
186 187 > tr.close()
187 188 > except error.BundleValueError as exc:
188 189 > raise error.Abort(b'missing support for %s' % exc)
189 190 > except error.PushRaced as exc:
190 191 > raise error.Abort(b'push race: %s' % exc)
191 192 > finally:
192 193 > if tr is not None:
193 194 > tr.release()
194 195 > lock.release()
195 196 > remains = procutil.stdin.read()
196 197 > ui.write(b'%i unread bytes\n' % len(remains))
197 198 > if op.records[b'song']:
198 199 > totalverses = sum(r[b'verses'] for r in op.records[b'song'])
199 200 > ui.write(b'%i total verses sung\n' % totalverses)
200 201 > for rec in op.records[b'changegroup']:
201 202 > ui.write(b'addchangegroup return: %i\n' % rec[b'return'])
202 203 > if op.reply is not None and replypath is not None:
203 204 > with open(replypath, 'wb') as file:
204 205 > for chunk in op.reply.getchunks():
205 206 > file.write(chunk)
206 207 >
207 208 > @command(b'statbundle2', [], b'')
208 209 > def cmdstatbundle2(ui, repo):
209 210 > """print statistic on the bundle2 container read from stdin"""
210 211 > unbundler = bundle2.getunbundler(ui, procutil.stdin)
211 212 > try:
212 213 > params = unbundler.params
213 214 > except error.BundleValueError as exc:
214 215 > raise error.Abort(b'unknown parameters: %s' % exc)
215 216 > ui.write(b'options count: %i\n' % len(params))
216 217 > for key in sorted(params):
217 218 > ui.write(b'- %s\n' % key)
218 219 > value = params[key]
219 220 > if value is not None:
220 221 > ui.write(b' %s\n' % value)
221 222 > count = 0
222 223 > for p in unbundler.iterparts():
223 224 > count += 1
224 225 > ui.write(b' :%s:\n' % p.type)
225 226 > ui.write(b' mandatory: %i\n' % len(p.mandatoryparams))
226 227 > ui.write(b' advisory: %i\n' % len(p.advisoryparams))
227 228 > ui.write(b' payload: %i bytes\n' % len(p.read()))
228 229 > ui.write(b'parts count: %i\n' % count)
229 230 > EOF
230 231 $ cat >> $HGRCPATH << EOF
231 232 > [extensions]
232 233 > bundle2=$TESTTMP/bundle2.py
233 234 > [experimental]
234 235 > evolution.createmarkers=True
235 236 > [ui]
236 237 > ssh="$PYTHON" "$TESTDIR/dummyssh"
237 238 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
238 239 > [web]
239 240 > push_ssl = false
240 241 > allow_push = *
241 242 > [phases]
242 243 > publish=False
243 244 > EOF
244 245
245 246 The extension requires a repo (currently unused)
246 247
247 248 $ hg init main
248 249 $ cd main
249 250 $ touch a
250 251 $ hg add a
251 252 $ hg commit -m 'a'
252 253
253 254
254 255 Empty bundle
255 256 =================
256 257
257 258 - no option
258 259 - no parts
259 260
260 261 Test bundling
261 262
262 263 $ hg bundle2 | f --hexdump
263 264
264 265 0000: 48 47 32 30 00 00 00 00 00 00 00 00 |HG20........|
265 266
266 267 Test timeouts during bundling
267 268 $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes
268 269 bundle2-output-bundle: "HG20", 1 parts total
269 270 bundle2-output: start emission of HG20 stream
270 271 0 chunk
271 272 bundle2-output: bundle parameter:
272 273 1 chunk
273 274 bundle2-output: start of parts
274 275 bundle2-output: bundle part: "test:song"
275 276 bundle2-output-part: "test:song" (advisory) 178 bytes payload
276 277 bundle2-output: part 0: "test:song"
277 278 bundle2-output: header chunk size: 16
278 279 2 chunk
279 280 3 chunk
280 281 bundle2-output: payload chunk size: 178
281 282 4 chunk
282 283 5 chunk
283 284 bundle2-generatorexit
284 285 fake timeout complete.
285 286
286 287 Test unbundling
287 288
288 289 $ hg bundle2 | hg statbundle2
289 290 options count: 0
290 291 parts count: 0
291 292
292 293 Test old style bundle are detected and refused
293 294
294 295 $ hg bundle --all --type v1 ../bundle.hg
295 296 1 changesets found
296 297 $ hg statbundle2 < ../bundle.hg
297 298 abort: unknown bundle version 10
298 299 [255]
299 300
300 301 Test parameters
301 302 =================
302 303
303 304 - some options
304 305 - no parts
305 306
306 307 advisory parameters, no value
307 308 -------------------------------
308 309
309 310 Simplest possible parameters form
310 311
311 312 Test generation simple option
312 313
313 314 $ hg bundle2 --param 'caution' | f --hexdump
314 315
315 316 0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.|
316 317 0010: 00 00 00 |...|
317 318
318 319 Test unbundling
319 320
320 321 $ hg bundle2 --param 'caution' | hg statbundle2
321 322 options count: 1
322 323 - caution
323 324 parts count: 0
324 325
325 326 Test generation multiple option
326 327
327 328 $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump
328 329
329 330 0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution |
330 331 0010: 6d 65 61 6c 00 00 00 00 |meal....|
331 332
332 333 Test unbundling
333 334
334 335 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
335 336 options count: 2
336 337 - caution
337 338 - meal
338 339 parts count: 0
339 340
340 341 advisory parameters, with value
341 342 -------------------------------
342 343
343 344 Test generation
344 345
345 346 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump
346 347
347 348 0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution |
348 349 0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph|
349 350 0020: 61 6e 74 73 00 00 00 00 |ants....|
350 351
351 352 Test unbundling
352 353
353 354 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
354 355 options count: 3
355 356 - caution
356 357 - elephants
357 358 - meal
358 359 vegan
359 360 parts count: 0
360 361
361 362 parameter with special char in value
362 363 ---------------------------------------------------
363 364
364 365 Test generation
365 366
366 367 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | f --hexdump
367 368
368 369 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
369 370 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
370 371 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
371 372 0030: 65 00 00 00 00 |e....|
372 373
373 374 Test unbundling
374 375
375 376 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
376 377 options count: 2
377 378 - e|! 7/
378 379 babar%#==tutu
379 380 - simple
380 381 parts count: 0
381 382
382 383 Test unknown mandatory option
383 384 ---------------------------------------------------
384 385
385 386 $ hg bundle2 --param 'Gravity' | hg statbundle2
386 387 abort: unknown parameters: Stream Parameter - Gravity
387 388 [255]
388 389
389 390 Test debug output
390 391 ---------------------------------------------------
391 392
392 393 bundling debug
393 394
394 395 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2 --config progress.debug=true --config devel.bundle2.debug=true
395 396 bundle2-output-bundle: "HG20", (2 params) 0 parts total
396 397 bundle2-output: start emission of HG20 stream
397 398 bundle2-output: bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
398 399 bundle2-output: start of parts
399 400 bundle2-output: end of bundle
400 401
401 402 file content is ok
402 403
403 404 $ f --hexdump ../out.hg2
404 405 ../out.hg2:
405 406 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
406 407 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
407 408 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
408 409 0030: 65 00 00 00 00 |e....|
409 410
410 411 unbundling debug
411 412
412 413 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2
413 414 bundle2-input: start processing of HG20 stream
414 415 bundle2-input: reading bundle2 stream parameters
415 416 bundle2-input: ignoring unknown parameter e|! 7/
416 417 bundle2-input: ignoring unknown parameter simple
417 418 options count: 2
418 419 - e|! 7/
419 420 babar%#==tutu
420 421 - simple
421 422 bundle2-input: start extraction of bundle2 parts
422 423 bundle2-input: part header size: 0
423 424 bundle2-input: end of bundle2 stream
424 425 parts count: 0
425 426
426 427
427 428 Test buggy input
428 429 ---------------------------------------------------
429 430
430 431 empty parameter name
431 432
432 433 $ hg bundle2 --param '' --quiet
433 434 abort: empty parameter name
434 435 [255]
435 436
436 437 bad parameter name
437 438
438 439 $ hg bundle2 --param 42babar
439 440 abort: non letter first character: 42babar
440 441 [255]
441 442
442 443
443 444 Test part
444 445 =================
445 446
446 447 $ hg bundle2 --parts ../parts.hg2 --debug --config progress.debug=true --config devel.bundle2.debug=true
447 448 bundle2-output-bundle: "HG20", 7 parts total
448 449 bundle2-output: start emission of HG20 stream
449 450 bundle2-output: bundle parameter:
450 451 bundle2-output: start of parts
451 452 bundle2-output: bundle part: "test:empty"
452 453 bundle2-output-part: "test:empty" (advisory) empty payload
453 454 bundle2-output: part 0: "test:empty"
454 455 bundle2-output: header chunk size: 17
455 456 bundle2-output: closing payload chunk
456 457 bundle2-output: bundle part: "test:empty"
457 458 bundle2-output-part: "test:empty" (advisory) empty payload
458 459 bundle2-output: part 1: "test:empty"
459 460 bundle2-output: header chunk size: 17
460 461 bundle2-output: closing payload chunk
461 462 bundle2-output: bundle part: "test:song"
462 463 bundle2-output-part: "test:song" (advisory) 178 bytes payload
463 464 bundle2-output: part 2: "test:song"
464 465 bundle2-output: header chunk size: 16
465 466 bundle2-output: payload chunk size: 178
466 467 bundle2-output: closing payload chunk
467 468 bundle2-output: bundle part: "test:debugreply"
468 469 bundle2-output-part: "test:debugreply" (advisory) empty payload
469 470 bundle2-output: part 3: "test:debugreply"
470 471 bundle2-output: header chunk size: 22
471 472 bundle2-output: closing payload chunk
472 473 bundle2-output: bundle part: "test:math"
473 474 bundle2-output-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) 2 bytes payload
474 475 bundle2-output: part 4: "test:math"
475 476 bundle2-output: header chunk size: 43
476 477 bundle2-output: payload chunk size: 2
477 478 bundle2-output: closing payload chunk
478 479 bundle2-output: bundle part: "test:song"
479 480 bundle2-output-part: "test:song" (advisory) (params: 1 mandatory) empty payload
480 481 bundle2-output: part 5: "test:song"
481 482 bundle2-output: header chunk size: 29
482 483 bundle2-output: closing payload chunk
483 484 bundle2-output: bundle part: "test:ping"
484 485 bundle2-output-part: "test:ping" (advisory) empty payload
485 486 bundle2-output: part 6: "test:ping"
486 487 bundle2-output: header chunk size: 16
487 488 bundle2-output: closing payload chunk
488 489 bundle2-output: end of bundle
489 490
490 491 $ f --hexdump ../parts.hg2
491 492 ../parts.hg2:
492 493 0000: 48 47 32 30 00 00 00 00 00 00 00 11 0a 74 65 73 |HG20.........tes|
493 494 0010: 74 3a 65 6d 70 74 79 00 00 00 00 00 00 00 00 00 |t:empty.........|
494 495 0020: 00 00 00 00 11 0a 74 65 73 74 3a 65 6d 70 74 79 |......test:empty|
495 496 0030: 00 00 00 01 00 00 00 00 00 00 00 00 00 10 09 74 |...............t|
496 497 0040: 65 73 74 3a 73 6f 6e 67 00 00 00 02 00 00 00 00 |est:song........|
497 498 0050: 00 b2 50 61 74 61 6c 69 20 44 69 72 61 70 61 74 |..Patali Dirapat|
498 499 0060: 61 2c 20 43 72 6f 6d 64 61 20 43 72 6f 6d 64 61 |a, Cromda Cromda|
499 500 0070: 20 52 69 70 61 6c 6f 2c 20 50 61 74 61 20 50 61 | Ripalo, Pata Pa|
500 501 0080: 74 61 2c 20 4b 6f 20 4b 6f 20 4b 6f 0a 42 6f 6b |ta, Ko Ko Ko.Bok|
501 502 0090: 6f 72 6f 20 44 69 70 6f 75 6c 69 74 6f 2c 20 52 |oro Dipoulito, R|
502 503 00a0: 6f 6e 64 69 20 52 6f 6e 64 69 20 50 65 70 69 6e |ondi Rondi Pepin|
503 504 00b0: 6f 2c 20 50 61 74 61 20 50 61 74 61 2c 20 4b 6f |o, Pata Pata, Ko|
504 505 00c0: 20 4b 6f 20 4b 6f 0a 45 6d 61 6e 61 20 4b 61 72 | Ko Ko.Emana Kar|
505 506 00d0: 61 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c |assoli, Loucra L|
506 507 00e0: 6f 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 |oucra Ponponto, |
507 508 00f0: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
508 509 0100: 20 4b 6f 2e 00 00 00 00 00 00 00 16 0f 74 65 73 | Ko..........tes|
509 510 0110: 74 3a 64 65 62 75 67 72 65 70 6c 79 00 00 00 03 |t:debugreply....|
510 511 0120: 00 00 00 00 00 00 00 00 00 2b 09 74 65 73 74 3a |.........+.test:|
511 512 0130: 6d 61 74 68 00 00 00 04 02 01 02 04 01 04 07 03 |math............|
512 513 0140: 70 69 33 2e 31 34 65 32 2e 37 32 63 6f 6f 6b 69 |pi3.14e2.72cooki|
513 514 0150: 6e 67 72 61 77 00 00 00 02 34 32 00 00 00 00 00 |ngraw....42.....|
514 515 0160: 00 00 1d 09 74 65 73 74 3a 73 6f 6e 67 00 00 00 |....test:song...|
515 516 0170: 05 01 00 0b 00 72 61 6e 64 6f 6d 70 61 72 61 6d |.....randomparam|
516 517 0180: 00 00 00 00 00 00 00 10 09 74 65 73 74 3a 70 69 |.........test:pi|
517 518 0190: 6e 67 00 00 00 06 00 00 00 00 00 00 00 00 00 00 |ng..............|
518 519
519 520
520 521 $ hg statbundle2 < ../parts.hg2
521 522 options count: 0
522 523 :test:empty:
523 524 mandatory: 0
524 525 advisory: 0
525 526 payload: 0 bytes
526 527 :test:empty:
527 528 mandatory: 0
528 529 advisory: 0
529 530 payload: 0 bytes
530 531 :test:song:
531 532 mandatory: 0
532 533 advisory: 0
533 534 payload: 178 bytes
534 535 :test:debugreply:
535 536 mandatory: 0
536 537 advisory: 0
537 538 payload: 0 bytes
538 539 :test:math:
539 540 mandatory: 2
540 541 advisory: 1
541 542 payload: 2 bytes
542 543 :test:song:
543 544 mandatory: 1
544 545 advisory: 0
545 546 payload: 0 bytes
546 547 :test:ping:
547 548 mandatory: 0
548 549 advisory: 0
549 550 payload: 0 bytes
550 551 parts count: 7
551 552
552 553 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
553 554 bundle2-input: start processing of HG20 stream
554 555 bundle2-input: reading bundle2 stream parameters
555 556 options count: 0
556 557 bundle2-input: start extraction of bundle2 parts
557 558 bundle2-input: part header size: 17
558 559 bundle2-input: part type: "test:empty"
559 560 bundle2-input: part id: "0"
560 561 bundle2-input: part parameters: 0
561 562 :test:empty:
562 563 mandatory: 0
563 564 advisory: 0
564 565 bundle2-input: payload chunk size: 0
565 566 payload: 0 bytes
566 567 bundle2-input: part header size: 17
567 568 bundle2-input: part type: "test:empty"
568 569 bundle2-input: part id: "1"
569 570 bundle2-input: part parameters: 0
570 571 :test:empty:
571 572 mandatory: 0
572 573 advisory: 0
573 574 bundle2-input: payload chunk size: 0
574 575 payload: 0 bytes
575 576 bundle2-input: part header size: 16
576 577 bundle2-input: part type: "test:song"
577 578 bundle2-input: part id: "2"
578 579 bundle2-input: part parameters: 0
579 580 :test:song:
580 581 mandatory: 0
581 582 advisory: 0
582 583 bundle2-input: payload chunk size: 178
583 584 bundle2-input: payload chunk size: 0
584 585 bundle2-input-part: total payload size 178
585 586 payload: 178 bytes
586 587 bundle2-input: part header size: 22
587 588 bundle2-input: part type: "test:debugreply"
588 589 bundle2-input: part id: "3"
589 590 bundle2-input: part parameters: 0
590 591 :test:debugreply:
591 592 mandatory: 0
592 593 advisory: 0
593 594 bundle2-input: payload chunk size: 0
594 595 payload: 0 bytes
595 596 bundle2-input: part header size: 43
596 597 bundle2-input: part type: "test:math"
597 598 bundle2-input: part id: "4"
598 599 bundle2-input: part parameters: 3
599 600 :test:math:
600 601 mandatory: 2
601 602 advisory: 1
602 603 bundle2-input: payload chunk size: 2
603 604 bundle2-input: payload chunk size: 0
604 605 bundle2-input-part: total payload size 2
605 606 payload: 2 bytes
606 607 bundle2-input: part header size: 29
607 608 bundle2-input: part type: "test:song"
608 609 bundle2-input: part id: "5"
609 610 bundle2-input: part parameters: 1
610 611 :test:song:
611 612 mandatory: 1
612 613 advisory: 0
613 614 bundle2-input: payload chunk size: 0
614 615 payload: 0 bytes
615 616 bundle2-input: part header size: 16
616 617 bundle2-input: part type: "test:ping"
617 618 bundle2-input: part id: "6"
618 619 bundle2-input: part parameters: 0
619 620 :test:ping:
620 621 mandatory: 0
621 622 advisory: 0
622 623 bundle2-input: payload chunk size: 0
623 624 payload: 0 bytes
624 625 bundle2-input: part header size: 0
625 626 bundle2-input: end of bundle2 stream
626 627 parts count: 7
627 628
628 629 Test actual unbundling of test part
629 630 =======================================
630 631
631 632 Process the bundle
632 633
633 634 $ hg unbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
634 635 bundle2-input: start processing of HG20 stream
635 636 bundle2-input: reading bundle2 stream parameters
636 637 bundle2-input-bundle: with-transaction
637 638 bundle2-input: start extraction of bundle2 parts
638 639 bundle2-input: part header size: 17
639 640 bundle2-input: part type: "test:empty"
640 641 bundle2-input: part id: "0"
641 642 bundle2-input: part parameters: 0
642 643 bundle2-input: ignoring unsupported advisory part test:empty
643 644 bundle2-input-part: "test:empty" (advisory) unsupported-type
644 645 bundle2-input: payload chunk size: 0
645 646 bundle2-input: part header size: 17
646 647 bundle2-input: part type: "test:empty"
647 648 bundle2-input: part id: "1"
648 649 bundle2-input: part parameters: 0
649 650 bundle2-input: ignoring unsupported advisory part test:empty
650 651 bundle2-input-part: "test:empty" (advisory) unsupported-type
651 652 bundle2-input: payload chunk size: 0
652 653 bundle2-input: part header size: 16
653 654 bundle2-input: part type: "test:song"
654 655 bundle2-input: part id: "2"
655 656 bundle2-input: part parameters: 0
656 657 bundle2-input: found a handler for part test:song
657 658 bundle2-input-part: "test:song" (advisory) supported
658 659 The choir starts singing:
659 660 bundle2-input: payload chunk size: 178
660 661 bundle2-input: payload chunk size: 0
661 662 bundle2-input-part: total payload size 178
662 663 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
663 664 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
664 665 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
665 666 bundle2-input: part header size: 22
666 667 bundle2-input: part type: "test:debugreply"
667 668 bundle2-input: part id: "3"
668 669 bundle2-input: part parameters: 0
669 670 bundle2-input: found a handler for part test:debugreply
670 671 bundle2-input-part: "test:debugreply" (advisory) supported
671 672 debugreply: no reply
672 673 bundle2-input: payload chunk size: 0
673 674 bundle2-input: part header size: 43
674 675 bundle2-input: part type: "test:math"
675 676 bundle2-input: part id: "4"
676 677 bundle2-input: part parameters: 3
677 678 bundle2-input: ignoring unsupported advisory part test:math
678 679 bundle2-input-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) unsupported-type
679 680 bundle2-input: payload chunk size: 2
680 681 bundle2-input: payload chunk size: 0
681 682 bundle2-input-part: total payload size 2
682 683 bundle2-input: part header size: 29
683 684 bundle2-input: part type: "test:song"
684 685 bundle2-input: part id: "5"
685 686 bundle2-input: part parameters: 1
686 687 bundle2-input: found a handler for part test:song
687 688 bundle2-input: ignoring unsupported advisory part test:song - randomparam
688 689 bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (randomparam)
689 690 bundle2-input: payload chunk size: 0
690 691 bundle2-input: part header size: 16
691 692 bundle2-input: part type: "test:ping"
692 693 bundle2-input: part id: "6"
693 694 bundle2-input: part parameters: 0
694 695 bundle2-input: found a handler for part test:ping
695 696 bundle2-input-part: "test:ping" (advisory) supported
696 697 received ping request (id 6)
697 698 bundle2-input: payload chunk size: 0
698 699 bundle2-input: part header size: 0
699 700 bundle2-input: end of bundle2 stream
700 701 bundle2-input-bundle: 7 parts total
701 702 0 unread bytes
702 703 3 total verses sung
703 704
704 705 Unbundle with an unknown mandatory part
705 706 (should abort)
706 707
707 708 $ hg bundle2 --parts --unknown ../unknown.hg2
708 709
709 710 $ hg unbundle2 < ../unknown.hg2
710 711 The choir starts singing:
711 712 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
712 713 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
713 714 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
714 715 debugreply: no reply
715 716 0 unread bytes
716 717 abort: missing support for test:unknown
717 718 [255]
718 719
719 720 Unbundle with an unknown mandatory part parameters
720 721 (should abort)
721 722
722 723 $ hg bundle2 --unknownparams ../unknown.hg2
723 724
724 725 $ hg unbundle2 < ../unknown.hg2
725 726 0 unread bytes
726 727 abort: missing support for test:song - randomparams
727 728 [255]
728 729
729 730 unbundle with a reply
730 731
731 732 $ hg bundle2 --parts --reply ../parts-reply.hg2
732 733 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
733 734 0 unread bytes
734 735 3 total verses sung
735 736
736 737 The reply is a bundle
737 738
738 739 $ f --hexdump ../reply.hg2
739 740 ../reply.hg2:
740 741 0000: 48 47 32 30 00 00 00 00 00 00 00 1b 06 6f 75 74 |HG20.........out|
741 742 0010: 70 75 74 00 00 00 00 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
742 743 0020: 70 6c 79 2d 74 6f 33 00 00 00 d9 54 68 65 20 63 |ply-to3....The c|
743 744 0030: 68 6f 69 72 20 73 74 61 72 74 73 20 73 69 6e 67 |hoir starts sing|
744 745 0040: 69 6e 67 3a 0a 20 20 20 20 50 61 74 61 6c 69 20 |ing:. Patali |
745 746 0050: 44 69 72 61 70 61 74 61 2c 20 43 72 6f 6d 64 61 |Dirapata, Cromda|
746 747 0060: 20 43 72 6f 6d 64 61 20 52 69 70 61 6c 6f 2c 20 | Cromda Ripalo, |
747 748 0070: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
748 749 0080: 20 4b 6f 0a 20 20 20 20 42 6f 6b 6f 72 6f 20 44 | Ko. Bokoro D|
749 750 0090: 69 70 6f 75 6c 69 74 6f 2c 20 52 6f 6e 64 69 20 |ipoulito, Rondi |
750 751 00a0: 52 6f 6e 64 69 20 50 65 70 69 6e 6f 2c 20 50 61 |Rondi Pepino, Pa|
751 752 00b0: 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 4b |ta Pata, Ko Ko K|
752 753 00c0: 6f 0a 20 20 20 20 45 6d 61 6e 61 20 4b 61 72 61 |o. Emana Kara|
753 754 00d0: 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c 6f |ssoli, Loucra Lo|
754 755 00e0: 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 50 |ucra Ponponto, P|
755 756 00f0: 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 |ata Pata, Ko Ko |
756 757 0100: 4b 6f 2e 0a 00 00 00 00 00 00 00 1b 06 6f 75 74 |Ko...........out|
757 758 0110: 70 75 74 00 00 00 01 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
758 759 0120: 70 6c 79 2d 74 6f 34 00 00 00 c9 64 65 62 75 67 |ply-to4....debug|
759 760 0130: 72 65 70 6c 79 3a 20 63 61 70 61 62 69 6c 69 74 |reply: capabilit|
760 761 0140: 69 65 73 3a 0a 64 65 62 75 67 72 65 70 6c 79 3a |ies:.debugreply:|
761 762 0150: 20 20 20 20 20 27 63 69 74 79 3d 21 27 0a 64 65 | 'city=!'.de|
762 763 0160: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
763 764 0170: 20 20 27 63 65 6c 65 73 74 65 2c 76 69 6c 6c 65 | 'celeste,ville|
764 765 0180: 27 0a 64 65 62 75 67 72 65 70 6c 79 3a 20 20 20 |'.debugreply: |
765 766 0190: 20 20 27 65 6c 65 70 68 61 6e 74 73 27 0a 64 65 | 'elephants'.de|
766 767 01a0: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
767 768 01b0: 20 20 27 62 61 62 61 72 27 0a 64 65 62 75 67 72 | 'babar'.debugr|
768 769 01c0: 65 70 6c 79 3a 20 20 20 20 20 20 20 20 20 27 63 |eply: 'c|
769 770 01d0: 65 6c 65 73 74 65 27 0a 64 65 62 75 67 72 65 70 |eleste'.debugrep|
770 771 01e0: 6c 79 3a 20 20 20 20 20 27 70 69 6e 67 2d 70 6f |ly: 'ping-po|
771 772 01f0: 6e 67 27 0a 00 00 00 00 00 00 00 1e 09 74 65 73 |ng'..........tes|
772 773 0200: 74 3a 70 6f 6e 67 00 00 00 02 01 00 0b 01 69 6e |t:pong........in|
773 774 0210: 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 00 00 00 |-reply-to7......|
774 775 0220: 00 1b 06 6f 75 74 70 75 74 00 00 00 03 00 01 0b |...output.......|
775 776 0230: 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 |.in-reply-to7...|
776 777 0240: 3d 72 65 63 65 69 76 65 64 20 70 69 6e 67 20 72 |=received ping r|
777 778 0250: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 72 65 |equest (id 7).re|
778 779 0260: 70 6c 79 69 6e 67 20 74 6f 20 70 69 6e 67 20 72 |plying to ping r|
779 780 0270: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 00 00 |equest (id 7)...|
780 781 0280: 00 00 00 00 00 00 |......|
781 782
782 783 The reply is valid
783 784
784 785 $ hg statbundle2 < ../reply.hg2
785 786 options count: 0
786 787 :output:
787 788 mandatory: 0
788 789 advisory: 1
789 790 payload: 217 bytes
790 791 :output:
791 792 mandatory: 0
792 793 advisory: 1
793 794 payload: 201 bytes
794 795 :test:pong:
795 796 mandatory: 1
796 797 advisory: 0
797 798 payload: 0 bytes
798 799 :output:
799 800 mandatory: 0
800 801 advisory: 1
801 802 payload: 61 bytes
802 803 parts count: 4
803 804
804 805 Unbundle the reply to get the output:
805 806
806 807 $ hg unbundle2 < ../reply.hg2
807 808 remote: The choir starts singing:
808 809 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
809 810 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
810 811 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
811 812 remote: debugreply: capabilities:
812 813 remote: debugreply: 'city=!'
813 814 remote: debugreply: 'celeste,ville'
814 815 remote: debugreply: 'elephants'
815 816 remote: debugreply: 'babar'
816 817 remote: debugreply: 'celeste'
817 818 remote: debugreply: 'ping-pong'
818 819 remote: received ping request (id 7)
819 820 remote: replying to ping request (id 7)
820 821 0 unread bytes
821 822
822 823 Test push race detection
823 824
824 825 $ hg bundle2 --pushrace ../part-race.hg2
825 826
826 827 $ hg unbundle2 < ../part-race.hg2
827 828 0 unread bytes
828 829 abort: push race: remote repository changed while pushing - please try again
829 830 [255]
830 831
831 832 Support for changegroup
832 833 ===================================
833 834
834 835 $ hg unbundle $TESTDIR/bundles/rebase.hg
835 836 adding changesets
836 837 adding manifests
837 838 adding file changes
838 839 added 8 changesets with 7 changes to 7 files (+3 heads)
839 840 new changesets cd010b8cd998:02de42196ebe (8 drafts)
840 841 (run 'hg heads' to see heads, 'hg merge' to merge)
841 842
842 843 $ hg log -G
843 844 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
844 845 |
845 846 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
846 847 |/|
847 848 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
848 849 | |
849 850 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
850 851 |/
851 852 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
852 853 | |
853 854 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
854 855 | |
855 856 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
856 857 |/
857 858 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
858 859
859 860 @ 0:3903775176ed draft test a
860 861
861 862
862 863 $ hg bundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true --rev '8+7+5+4' ../rev.hg2
863 864 4 changesets found
864 865 list of changesets:
865 866 32af7686d403cf45b5d95f2d70cebea587ac806a
866 867 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
867 868 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
868 869 02de42196ebee42ef284b6780a87cdc96e8eaab6
869 870 bundle2-output-bundle: "HG20", 1 parts total
870 871 bundle2-output: start emission of HG20 stream
871 872 bundle2-output: bundle parameter:
872 873 bundle2-output: start of parts
873 874 bundle2-output: bundle part: "changegroup"
874 875 bundle2-output-part: "changegroup" (advisory) streamed payload
875 876 bundle2-output: part 0: "changegroup"
876 877 bundle2-output: header chunk size: 18
877 878 changesets: 1/4 chunks (25.00%)
878 879 changesets: 2/4 chunks (50.00%)
879 880 changesets: 3/4 chunks (75.00%)
880 881 changesets: 4/4 chunks (100.00%)
881 882 manifests: 1/4 chunks (25.00%)
882 883 manifests: 2/4 chunks (50.00%)
883 884 manifests: 3/4 chunks (75.00%)
884 885 manifests: 4/4 chunks (100.00%)
885 886 files: D 1/3 files (33.33%)
886 887 files: E 2/3 files (66.67%)
887 888 files: H 3/3 files (100.00%)
888 889 bundle2-output: payload chunk size: 1555
889 890 bundle2-output: closing payload chunk
890 891 bundle2-output: end of bundle
891 892
892 893 $ f --hexdump ../rev.hg2
893 894 ../rev.hg2:
894 895 0000: 48 47 32 30 00 00 00 00 00 00 00 12 0b 63 68 61 |HG20.........cha|
895 896 0010: 6e 67 65 67 72 6f 75 70 00 00 00 00 00 00 00 00 |ngegroup........|
896 897 0020: 06 13 00 00 00 a4 32 af 76 86 d4 03 cf 45 b5 d9 |......2.v....E..|
897 898 0030: 5f 2d 70 ce be a5 87 ac 80 6a 5f dd d9 89 57 c8 |_-p......j_...W.|
898 899 0040: a5 4a 4d 43 6d fe 1d a9 d8 7f 21 a1 b9 7b 00 00 |.JMCm.....!..{..|
899 900 0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
900 901 0060: 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d 70 ce |..2.v....E.._-p.|
901 902 0070: be a5 87 ac 80 6a 00 00 00 00 00 00 00 29 00 00 |.....j.......)..|
902 903 0080: 00 29 36 65 31 66 34 63 34 37 65 63 62 35 33 33 |.)6e1f4c47ecb533|
903 904 0090: 66 66 64 30 63 38 65 35 32 63 64 63 38 38 61 66 |ffd0c8e52cdc88af|
904 905 00a0: 62 36 63 64 33 39 65 32 30 63 0a 00 00 00 66 00 |b6cd39e20c....f.|
905 906 00b0: 00 00 68 00 00 00 02 44 0a 00 00 00 69 00 00 00 |..h....D....i...|
906 907 00c0: 6a 00 00 00 01 44 00 00 00 a4 95 20 ee a7 81 bc |j....D..... ....|
907 908 00d0: ca 16 c1 e1 5a cc 0b a1 43 35 a0 e8 e5 ba cd 01 |....Z...C5......|
908 909 00e0: 0b 8c d9 98 f3 98 1a 5a 81 15 f9 4f 8d a4 ab 50 |.......Z...O...P|
909 910 00f0: 60 89 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |`...............|
910 911 0100: 00 00 00 00 00 00 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
911 912 0110: 5a cc 0b a1 43 35 a0 e8 e5 ba 00 00 00 00 00 00 |Z...C5..........|
912 913 0120: 00 29 00 00 00 29 34 64 65 63 65 39 63 38 32 36 |.)...)4dece9c826|
913 914 0130: 66 36 39 34 39 30 35 30 37 62 39 38 63 36 33 38 |f69490507b98c638|
914 915 0140: 33 61 33 30 30 39 62 32 39 35 38 33 37 64 0a 00 |3a3009b295837d..|
915 916 0150: 00 00 66 00 00 00 68 00 00 00 02 45 0a 00 00 00 |..f...h....E....|
916 917 0160: 69 00 00 00 6a 00 00 00 01 45 00 00 00 a2 ee a1 |i...j....E......|
917 918 0170: 37 46 79 9a 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f |7Fy.......<...8.|
918 919 0180: 52 4f 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 fa 95 |RO$.8|...7......|
919 920 0190: de d3 cb 1c f7 85 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
920 921 01a0: 5a cc 0b a1 43 35 a0 e8 e5 ba ee a1 37 46 79 9a |Z...C5......7Fy.|
921 922 01b0: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
922 923 01c0: 00 00 00 00 00 29 00 00 00 29 33 36 35 62 39 33 |.....)...)365b93|
923 924 01d0: 64 35 37 66 64 66 34 38 31 34 65 32 62 35 39 31 |d57fdf4814e2b591|
924 925 01e0: 31 64 36 62 61 63 66 66 32 62 31 32 30 31 34 34 |1d6bacff2b120144|
925 926 01f0: 34 31 0a 00 00 00 66 00 00 00 68 00 00 00 00 00 |41....f...h.....|
926 927 0200: 00 00 69 00 00 00 6a 00 00 00 01 47 00 00 00 a4 |..i...j....G....|
927 928 0210: 02 de 42 19 6e be e4 2e f2 84 b6 78 0a 87 cd c9 |..B.n......x....|
928 929 0220: 6e 8e aa b6 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 |n...$.8|...7....|
929 930 0230: fa 95 de d3 cb 1c f7 85 00 00 00 00 00 00 00 00 |................|
930 931 0240: 00 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 |..............B.|
931 932 0250: 6e be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 |n......x....n...|
932 933 0260: 00 00 00 00 00 00 00 29 00 00 00 29 38 62 65 65 |.......)...)8bee|
933 934 0270: 34 38 65 64 63 37 33 31 38 35 34 31 66 63 30 30 |48edc7318541fc00|
934 935 0280: 31 33 65 65 34 31 62 30 38 39 32 37 36 61 38 63 |13ee41b089276a8c|
935 936 0290: 32 34 62 66 0a 00 00 00 66 00 00 00 66 00 00 00 |24bf....f...f...|
936 937 02a0: 02 48 0a 00 00 00 67 00 00 00 68 00 00 00 01 48 |.H....g...h....H|
937 938 02b0: 00 00 00 00 00 00 00 8b 6e 1f 4c 47 ec b5 33 ff |........n.LG..3.|
938 939 02c0: d0 c8 e5 2c dc 88 af b6 cd 39 e2 0c 66 a5 a0 18 |...,.....9..f...|
939 940 02d0: 17 fd f5 23 9c 27 38 02 b5 b7 61 8d 05 1c 89 e4 |...#.'8...a.....|
940 941 02e0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
941 942 02f0: 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d |....2.v....E.._-|
942 943 0300: 70 ce be a5 87 ac 80 6a 00 00 00 81 00 00 00 81 |p......j........|
943 944 0310: 00 00 00 2b 44 00 63 33 66 31 63 61 32 39 32 34 |...+D.c3f1ca2924|
944 945 0320: 63 31 36 61 31 39 62 30 36 35 36 61 38 34 39 30 |c16a19b0656a8490|
945 946 0330: 30 65 35 30 34 65 35 62 30 61 65 63 32 64 0a 00 |0e504e5b0aec2d..|
946 947 0340: 00 00 8b 4d ec e9 c8 26 f6 94 90 50 7b 98 c6 38 |...M...&...P{..8|
947 948 0350: 3a 30 09 b2 95 83 7d 00 7d 8c 9d 88 84 13 25 f5 |:0....}.}.....%.|
948 949 0360: c6 b0 63 71 b3 5b 4e 8a 2b 1a 83 00 00 00 00 00 |..cq.[N.+.......|
949 950 0370: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 95 |................|
950 951 0380: 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 a0 | ........Z...C5.|
951 952 0390: e8 e5 ba 00 00 00 2b 00 00 00 ac 00 00 00 2b 45 |......+.......+E|
952 953 03a0: 00 39 63 36 66 64 30 33 35 30 61 36 63 30 64 30 |.9c6fd0350a6c0d0|
953 954 03b0: 63 34 39 64 34 61 39 63 35 30 31 37 63 66 30 37 |c49d4a9c5017cf07|
954 955 03c0: 30 34 33 66 35 34 65 35 38 0a 00 00 00 8b 36 5b |043f54e58.....6[|
955 956 03d0: 93 d5 7f df 48 14 e2 b5 91 1d 6b ac ff 2b 12 01 |....H.....k..+..|
956 957 03e0: 44 41 28 a5 84 c6 5e f1 21 f8 9e b6 6a b7 d0 bc |DA(...^.!...j...|
957 958 03f0: 15 3d 80 99 e7 ce 4d ec e9 c8 26 f6 94 90 50 7b |.=....M...&...P{|
958 959 0400: 98 c6 38 3a 30 09 b2 95 83 7d ee a1 37 46 79 9a |..8:0....}..7Fy.|
959 960 0410: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
960 961 0420: 00 56 00 00 00 56 00 00 00 2b 46 00 32 32 62 66 |.V...V...+F.22bf|
961 962 0430: 63 66 64 36 32 61 32 31 61 33 32 38 37 65 64 62 |cfd62a21a3287edb|
962 963 0440: 64 34 64 36 35 36 32 31 38 64 30 66 35 32 35 65 |d4d656218d0f525e|
963 964 0450: 64 37 36 61 0a 00 00 00 97 8b ee 48 ed c7 31 85 |d76a.......H..1.|
964 965 0460: 41 fc 00 13 ee 41 b0 89 27 6a 8c 24 bf 28 a5 84 |A....A..'j.$.(..|
965 966 0470: c6 5e f1 21 f8 9e b6 6a b7 d0 bc 15 3d 80 99 e7 |.^.!...j....=...|
966 967 0480: ce 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
967 968 0490: 00 00 00 00 00 02 de 42 19 6e be e4 2e f2 84 b6 |.......B.n......|
968 969 04a0: 78 0a 87 cd c9 6e 8e aa b6 00 00 00 2b 00 00 00 |x....n......+...|
969 970 04b0: 56 00 00 00 00 00 00 00 81 00 00 00 81 00 00 00 |V...............|
970 971 04c0: 2b 48 00 38 35 30 30 31 38 39 65 37 34 61 39 65 |+H.8500189e74a9e|
971 972 04d0: 30 34 37 35 65 38 32 32 30 39 33 62 63 37 64 62 |0475e822093bc7db|
972 973 04e0: 30 64 36 33 31 61 65 62 30 62 34 0a 00 00 00 00 |0d631aeb0b4.....|
973 974 04f0: 00 00 00 05 44 00 00 00 62 c3 f1 ca 29 24 c1 6a |....D...b...)$.j|
974 975 0500: 19 b0 65 6a 84 90 0e 50 4e 5b 0a ec 2d 00 00 00 |..ej...PN[..-...|
975 976 0510: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
976 977 0520: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
977 978 0530: 00 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f |.....2.v....E.._|
978 979 0540: 2d 70 ce be a5 87 ac 80 6a 00 00 00 00 00 00 00 |-p......j.......|
979 980 0550: 00 00 00 00 02 44 0a 00 00 00 00 00 00 00 05 45 |.....D.........E|
980 981 0560: 00 00 00 62 9c 6f d0 35 0a 6c 0d 0c 49 d4 a9 c5 |...b.o.5.l..I...|
981 982 0570: 01 7c f0 70 43 f5 4e 58 00 00 00 00 00 00 00 00 |.|.pC.NX........|
982 983 0580: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
983 984 0590: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
984 985 05a0: 95 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 |. ........Z...C5|
985 986 05b0: a0 e8 e5 ba 00 00 00 00 00 00 00 00 00 00 00 02 |................|
986 987 05c0: 45 0a 00 00 00 00 00 00 00 05 48 00 00 00 62 85 |E.........H...b.|
987 988 05d0: 00 18 9e 74 a9 e0 47 5e 82 20 93 bc 7d b0 d6 31 |...t..G^. ..}..1|
988 989 05e0: ae b0 b4 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
989 990 05f0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
990 991 0600: 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 6e |.............B.n|
991 992 0610: be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 00 |......x....n....|
992 993 0620: 00 00 00 00 00 00 00 00 00 00 02 48 0a 00 00 00 |...........H....|
993 994 0630: 00 00 00 00 00 00 00 00 00 00 00 00 00 |.............|
994 995
995 996 $ hg debugbundle ../rev.hg2
996 997 Stream params: {}
997 998 changegroup -- {} (mandatory: False)
998 999 32af7686d403cf45b5d95f2d70cebea587ac806a
999 1000 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1000 1001 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1001 1002 02de42196ebee42ef284b6780a87cdc96e8eaab6
1002 1003 $ hg unbundle ../rev.hg2
1003 1004 adding changesets
1004 1005 adding manifests
1005 1006 adding file changes
1006 1007 added 0 changesets with 0 changes to 3 files
1007 1008 (run 'hg update' to get a working copy)
1008 1009
1009 1010 with reply
1010 1011
1011 1012 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
1012 1013 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
1013 1014 added 0 changesets with 0 changes to 3 files
1014 1015 0 unread bytes
1015 1016 addchangegroup return: 1
1016 1017
1017 1018 $ f --hexdump ../rev-reply.hg2
1018 1019 ../rev-reply.hg2:
1019 1020 0000: 48 47 32 30 00 00 00 00 00 00 00 2f 11 72 65 70 |HG20......./.rep|
1020 1021 0010: 6c 79 3a 63 68 61 6e 67 65 67 72 6f 75 70 00 00 |ly:changegroup..|
1021 1022 0020: 00 00 00 02 0b 01 06 01 69 6e 2d 72 65 70 6c 79 |........in-reply|
1022 1023 0030: 2d 74 6f 31 72 65 74 75 72 6e 31 00 00 00 00 00 |-to1return1.....|
1023 1024 0040: 00 00 1b 06 6f 75 74 70 75 74 00 00 00 01 00 01 |....output......|
1024 1025 0050: 0b 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 31 00 00 |..in-reply-to1..|
1025 1026 0060: 00 37 61 64 64 69 6e 67 20 63 68 61 6e 67 65 73 |.7adding changes|
1026 1027 0070: 65 74 73 0a 61 64 64 69 6e 67 20 6d 61 6e 69 66 |ets.adding manif|
1027 1028 0080: 65 73 74 73 0a 61 64 64 69 6e 67 20 66 69 6c 65 |ests.adding file|
1028 1029 0090: 20 63 68 61 6e 67 65 73 0a 00 00 00 00 00 00 00 | changes........|
1029 1030 00a0: 00 |.|
1030 1031
1031 1032 Check handling of exception during generation.
1032 1033 ----------------------------------------------
1033 1034
1034 1035 $ hg bundle2 --genraise > ../genfailed.hg2
1035 1036 abort: Someone set up us the bomb!
1036 1037 [255]
1037 1038
1038 1039 Should still be a valid bundle
1039 1040
1040 1041 $ f --hexdump ../genfailed.hg2
1041 1042 ../genfailed.hg2:
1042 1043 0000: 48 47 32 30 00 00 00 00 00 00 00 0d 06 6f 75 74 |HG20.........out|
1043 1044 0010: 70 75 74 00 00 00 00 00 00 ff ff ff ff 00 00 00 |put.............|
1044 1045 0020: 48 0b 65 72 72 6f 72 3a 61 62 6f 72 74 00 00 00 |H.error:abort...|
1045 1046 0030: 00 01 00 07 2d 6d 65 73 73 61 67 65 75 6e 65 78 |....-messageunex|
1046 1047 0040: 70 65 63 74 65 64 20 65 72 72 6f 72 3a 20 53 6f |pected error: So|
1047 1048 0050: 6d 65 6f 6e 65 20 73 65 74 20 75 70 20 75 73 20 |meone set up us |
1048 1049 0060: 74 68 65 20 62 6f 6d 62 21 00 00 00 00 00 00 00 |the bomb!.......|
1049 1050 0070: 00 |.|
1050 1051
1051 1052 And its handling on the other size raise a clean exception
1052 1053
1053 1054 $ cat ../genfailed.hg2 | hg unbundle2
1054 1055 0 unread bytes
1055 1056 abort: unexpected error: Someone set up us the bomb!
1056 1057 [255]
1057 1058
1058 1059 Test compression
1059 1060 ================
1060 1061
1061 1062 Simple case where it just work: GZ
1062 1063 ----------------------------------
1063 1064
1064 1065 $ hg bundle2 --compress GZ --rev '8+7+5+4' ../rev.hg2.bz
1065 1066 $ f --hexdump ../rev.hg2.bz
1066 1067 ../rev.hg2.bz:
1067 1068 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1068 1069 0010: 69 6f 6e 3d 47 5a 78 9c 95 94 7d 68 95 55 1c c7 |ion=GZx...}h.U..|
1069 1070 0020: 9f 3b 31 e8 ce fa c3 65 be a0 a4 b4 52 b9 29 e7 |.;1....e....R.).|
1070 1071 0030: f5 79 ce 89 fa 63 ed 5e 77 8b 9c c3 3f 2a 1c 68 |.y...c.^w...?*.h|
1071 1072 0040: cf 79 9b dd 6a ae b0 28 74 b8 e5 96 5b bb 86 61 |.y..j..(t...[..a|
1072 1073 0050: a3 15 6e 3a 71 c8 6a e8 a5 da 95 64 28 22 ce 69 |..n:q.j....d(".i|
1073 1074 0060: cd 06 59 34 28 2b 51 2a 58 c3 17 56 2a 9a 9d 67 |..Y4(+Q*X..V*..g|
1074 1075 0070: dc c6 35 9e c4 1d f8 9e 87 f3 9c f3 3b bf 0f bf |..5.........;...|
1075 1076 0080: 97 e3 38 ce f4 42 b9 d6 af ae d2 55 af ae 7b ad |..8..B.....U..{.|
1076 1077 0090: c6 c9 8d bb 8a ec b4 07 ed 7f fd ed d3 53 be 4e |.............S.N|
1077 1078 00a0: f4 0e af 59 52 73 ea 50 d7 96 9e ba d4 9a 1f 87 |...YRs.P........|
1078 1079 00b0: 9b 9f 1d e8 7a 6a 79 e9 cb 7f cf eb fe 7e d3 82 |....zjy......~..|
1079 1080 00c0: ce 2f 36 38 21 23 cc 36 b7 b5 38 90 ab a1 21 92 |./68!#.6..8...!.|
1080 1081 00d0: 78 5a 0a 8a b1 31 0a 48 a6 29 92 4a 32 e6 1b e1 |xZ...1.H.).J2...|
1081 1082 00e0: 4a 85 b9 46 40 46 ed 61 63 b5 d6 aa 20 1e ac 5e |J..F@F.ac... ..^|
1082 1083 00f0: b0 0a ae 8a c4 03 c6 d6 f9 a3 7b eb fb 4e de 7f |..........{..N..|
1083 1084 0100: e4 97 55 5f 15 76 96 d2 5d bf 9d 3f 38 18 29 4c |..U_.v..]..?8.)L|
1084 1085 0110: 0f b7 5d 6e 9b b3 aa 7e c6 d5 15 5b f7 7c 52 f1 |..]n...~...[.|R.|
1085 1086 0120: 7c 73 18 63 98 6d 3e 23 51 5a 6a 2e 19 72 8d cb ||s.c.m>#QZj..r..|
1086 1087 0130: 09 07 14 78 82 33 e9 62 86 7d 0c 00 17 88 53 86 |...x.3.b.}....S.|
1087 1088 0140: 3d 75 0b 63 e2 16 c6 84 9d 76 8f 76 7a cb de fc |=u.c.....v.vz...|
1088 1089 0150: a8 a3 f0 46 d3 a5 f6 c7 96 b6 9f 60 3b 57 ae 28 |...F.......`;W.(|
1089 1090 0160: ce b2 8d e9 f4 3e 6f 66 53 dd e5 6b ad 67 be f9 |.....>ofS..k.g..|
1090 1091 0170: 72 ee 5f 8d 61 3c 61 b6 f9 8c d8 a5 82 63 45 3d |r._.a<a......cE=|
1091 1092 0180: a3 0c 61 90 68 24 28 87 50 b9 c2 97 c6 20 01 11 |..a.h$(.P.... ..|
1092 1093 0190: 80 84 10 98 cf e8 e4 13 96 05 51 2c 38 f3 c4 ec |..........Q,8...|
1093 1094 01a0: ea 43 e7 96 5e 6a c8 be 11 dd 32 78 a2 fa dd 8f |.C..^j....2x....|
1094 1095 01b0: b3 61 84 61 51 0c b3 cd 27 64 42 6b c2 b4 92 1e |.a.aQ...'dBk....|
1095 1096 01c0: 86 8c 12 68 24 00 10 db 7f 50 00 c6 91 e7 fa 4c |...h$....P.....L|
1096 1097 01d0: 22 22 cc bf 84 81 0a 92 c1 aa 2a c7 1b 49 e6 ee |""........*..I..|
1097 1098 01e0: 6b a9 7e e0 e9 b2 91 5e 7c 73 68 e0 fc 23 3f 34 |k.~....^|sh..#?4|
1098 1099 01f0: ed cf 0e f2 b3 d3 4c d7 ae 59 33 6f 8c 3d b8 63 |......L..Y3o.=.c|
1099 1100 0200: 21 2b e8 3d e0 6f 9d 3a b7 f9 dc 24 2a b2 3e a7 |!+.=.o.:...$*.>.|
1100 1101 0210: 58 dc 91 d8 40 e9 23 8e 88 84 ae 0f b9 00 2e b5 |X...@.#.........|
1101 1102 0220: 74 36 f3 40 53 40 34 15 c0 d7 12 8d e7 bb 65 f9 |t6.@S@4.......e.|
1102 1103 0230: c8 ef 03 0f ff f9 fe b6 8a 0d 6d fd ec 51 70 f7 |..........m..Qp.|
1103 1104 0240: a7 ad 9b 6b 9d da 74 7b 53 43 d1 43 63 fd 19 f9 |...k..t{SC.Cc...|
1104 1105 0250: ca 67 95 e5 ef c4 e6 6c 9e 44 e1 c5 ac 7a 82 6f |.g.....l.D...z.o|
1105 1106 0260: c2 e1 d2 b5 2d 81 29 f0 5d 09 6c 6f 10 ae 88 cf |....-.).].lo....|
1106 1107 0270: 25 05 d0 93 06 78 80 60 43 2d 10 1b 47 71 2b b7 |%....x.`C-..Gq+.|
1107 1108 0280: 7f bb e9 a7 e4 7d 67 7b df 9b f7 62 cf cd d8 f4 |.....}g{...b....|
1108 1109 0290: 48 bc 64 51 57 43 ff ea 8b 0b ae 74 64 53 07 86 |H.dQWC.....tdS..|
1109 1110 02a0: fa 66 3c 5e f7 e1 af a7 c2 90 ff a7 be 9e c9 29 |.f<^...........)|
1110 1111 02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P|
1111 1112 02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. ..0.Q.l....Z|
1112 1113 02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T|
1113 1114 02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.|
1114 1115 02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...|
1115 1116 0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!|
1116 1117 0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q|
1117 1118 0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...|
1118 1119 0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".|
1119 1120 0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL|
1120 1121 0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....|
1121 1122 0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
1122 1123 0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..|
1123 1124 $ hg debugbundle ../rev.hg2.bz
1124 1125 Stream params: {Compression: GZ}
1125 1126 changegroup -- {} (mandatory: False)
1126 1127 32af7686d403cf45b5d95f2d70cebea587ac806a
1127 1128 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1128 1129 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1129 1130 02de42196ebee42ef284b6780a87cdc96e8eaab6
1130 1131 $ hg unbundle ../rev.hg2.bz
1131 1132 adding changesets
1132 1133 adding manifests
1133 1134 adding file changes
1134 1135 added 0 changesets with 0 changes to 3 files
1135 1136 (run 'hg update' to get a working copy)
1136 1137 Simple case where it just work: BZ
1137 1138 ----------------------------------
1138 1139
1139 1140 $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz
1140 1141 $ f --hexdump ../rev.hg2.bz
1141 1142 ../rev.hg2.bz:
1142 1143 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1143 1144 0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY|
1144 1145 0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....|
1145 1146 0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....|
1146 1147 0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q|
1147 1148 0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....|
1148 1149 0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@|
1149 1150 0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..|
1150 1151 0080: 03 40 0c 08 da 86 43 d4 f5 0f 42 1e a0 f3 54 33 |.@....C...B...T3|
1151 1152 0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....|
1152 1153 00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.|
1153 1154 00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.|
1154 1155 00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....|
1155 1156 00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....|
1156 1157 00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.|
1157 1158 00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.|
1158 1159 0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.|
1159 1160 0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d|
1160 1161 0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......|
1161 1162 0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....|
1162 1163 0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1|
1163 1164 0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....|
1164 1165 0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.|
1165 1166 0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6|
1166 1167 0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....|
1167 1168 0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...|
1168 1169 01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..|
1169 1170 01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.|
1170 1171 01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S|
1171 1172 01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/|
1172 1173 01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. .8.....+..|
1173 1174 01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.|
1174 1175 0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..|
1175 1176 0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.|
1176 1177 0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..|
1177 1178 0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X|
1178 1179 0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...|
1179 1180 0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-|
1180 1181 0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..|
1181 1182 0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q|
1182 1183 0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.|
1183 1184 0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...|
1184 1185 02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..|
1185 1186 02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k|
1186 1187 02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(|
1187 1188 02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb|
1188 1189 02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F|
1189 1190 02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X|
1190 1191 0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..|
1191 1192 0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.|
1192 1193 0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..|
1193 1194 0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 2&J...).|
1194 1195 0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\|
1195 1196 0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.|
1196 1197 0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....|
1197 1198 0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..|
1198 1199 0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...|
1199 1200 0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2|
1200 1201 03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.|
1201 1202 03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.|
1202 1203 03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..|
1203 1204 03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.|
1204 1205 03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..|
1205 1206 03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.|
1206 1207 0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..|
1207 1208 0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9|
1208 1209 0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
1209 1210 0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z|
1210 1211 $ hg debugbundle ../rev.hg2.bz
1211 1212 Stream params: {Compression: BZ}
1212 1213 changegroup -- {} (mandatory: False)
1213 1214 32af7686d403cf45b5d95f2d70cebea587ac806a
1214 1215 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1215 1216 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1216 1217 02de42196ebee42ef284b6780a87cdc96e8eaab6
1217 1218 $ hg unbundle ../rev.hg2.bz
1218 1219 adding changesets
1219 1220 adding manifests
1220 1221 adding file changes
1221 1222 added 0 changesets with 0 changes to 3 files
1222 1223 (run 'hg update' to get a working copy)
1223 1224
1224 1225 unknown compression while unbundling
1225 1226 -----------------------------
1226 1227
1227 1228 $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz
1228 1229 $ cat ../rev.hg2.bz | hg statbundle2
1229 1230 abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown'
1230 1231 [255]
1231 1232 $ hg unbundle ../rev.hg2.bz
1232 1233 abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown'
1233 1234 (see https://mercurial-scm.org/wiki/BundleFeature for more information)
1234 1235 [255]
1235 1236
1236 1237 $ cd ..
@@ -1,94 +1,94 b''
1 1 ------ Test dirstate._dirs refcounting
2 2
3 3 $ hg init t
4 4 $ cd t
5 5 $ mkdir -p a/b/c/d
6 6 $ touch a/b/c/d/x
7 7 $ touch a/b/c/d/y
8 8 $ touch a/b/c/d/z
9 9 $ hg ci -Am m
10 10 adding a/b/c/d/x
11 11 adding a/b/c/d/y
12 12 adding a/b/c/d/z
13 13 $ hg mv a z
14 14 moving a/b/c/d/x to z/b/c/d/x
15 15 moving a/b/c/d/y to z/b/c/d/y
16 16 moving a/b/c/d/z to z/b/c/d/z
17 17
18 18 Test name collisions
19 19
20 20 $ rm z/b/c/d/x
21 21 $ mkdir z/b/c/d/x
22 22 $ touch z/b/c/d/x/y
23 23 $ hg add z/b/c/d/x/y
24 24 abort: file 'z/b/c/d/x' in dirstate clashes with 'z/b/c/d/x/y'
25 25 [255]
26 26 $ rm -rf z/b/c/d
27 27 $ touch z/b/c/d
28 28 $ hg add z/b/c/d
29 29 abort: directory 'z/b/c/d' already in dirstate
30 30 [255]
31 31
32 32 $ cd ..
33 33
34 34 Issue1790: dirstate entry locked into unset if file mtime is set into
35 35 the future
36 36
37 37 Prepare test repo:
38 38
39 39 $ hg init u
40 40 $ cd u
41 41 $ echo a > a
42 42 $ hg add
43 43 adding a
44 44 $ hg ci -m1
45 45
46 46 Set mtime of a into the future:
47 47
48 48 $ touch -t 202101011200 a
49 49
50 50 Status must not set a's entry to unset (issue1790):
51 51
52 52 $ hg status
53 53 $ hg debugstate
54 54 n 644 2 2021-01-01 12:00:00 a
55 55
56 56 Test modulo storage/comparison of absurd dates:
57 57
58 58 #if no-aix
59 59 $ touch -t 195001011200 a
60 60 $ hg st
61 61 $ hg debugstate
62 62 n 644 2 2018-01-19 15:14:08 a
63 63 #endif
64 64
65 65 Verify that exceptions during a dirstate change leave the dirstate
66 66 coherent (issue4353)
67 67
68 68 $ cat > ../dirstateexception.py <<EOF
69 69 > from __future__ import absolute_import
70 70 > from mercurial import (
71 71 > error,
72 72 > extensions,
73 73 > mergestate as mergestatemod,
74 74 > )
75 75 >
76 76 > def wraprecordupdates(*args):
77 > raise error.Abort("simulated error while recording dirstateupdates")
77 > raise error.Abort(b"simulated error while recording dirstateupdates")
78 78 >
79 79 > def reposetup(ui, repo):
80 80 > extensions.wrapfunction(mergestatemod, 'recordupdates',
81 81 > wraprecordupdates)
82 82 > EOF
83 83
84 84 $ hg rm a
85 85 $ hg commit -m 'rm a'
86 86 $ echo "[extensions]" >> .hg/hgrc
87 87 $ echo "dirstateex=../dirstateexception.py" >> .hg/hgrc
88 88 $ hg up 0
89 89 abort: simulated error while recording dirstateupdates
90 90 [255]
91 91 $ hg log -r . -T '{rev}\n'
92 92 1
93 93 $ hg status
94 94 ? a
@@ -1,545 +1,545 b''
1 1 #require repofncache
2 2
3 3 An extension which will set fncache chunksize to 1 byte to make sure that logic
4 4 does not break
5 5
6 6 $ cat > chunksize.py <<EOF
7 7 > from __future__ import absolute_import
8 8 > from mercurial import store
9 9 > store.fncache_chunksize = 1
10 10 > EOF
11 11
12 12 $ cat >> $HGRCPATH <<EOF
13 13 > [extensions]
14 14 > chunksize = $TESTTMP/chunksize.py
15 15 > EOF
16 16
17 17 Init repo1:
18 18
19 19 $ hg init repo1
20 20 $ cd repo1
21 21 $ echo "some text" > a
22 22 $ hg add
23 23 adding a
24 24 $ hg ci -m first
25 25 $ cat .hg/store/fncache | sort
26 26 data/a.i
27 27
28 28 Testing a.i/b:
29 29
30 30 $ mkdir a.i
31 31 $ echo "some other text" > a.i/b
32 32 $ hg add
33 33 adding a.i/b
34 34 $ hg ci -m second
35 35 $ cat .hg/store/fncache | sort
36 36 data/a.i
37 37 data/a.i.hg/b.i
38 38
39 39 Testing a.i.hg/c:
40 40
41 41 $ mkdir a.i.hg
42 42 $ echo "yet another text" > a.i.hg/c
43 43 $ hg add
44 44 adding a.i.hg/c
45 45 $ hg ci -m third
46 46 $ cat .hg/store/fncache | sort
47 47 data/a.i
48 48 data/a.i.hg.hg/c.i
49 49 data/a.i.hg/b.i
50 50
51 51 Testing verify:
52 52
53 53 $ hg verify
54 54 checking changesets
55 55 checking manifests
56 56 crosschecking files in changesets and manifests
57 57 checking files
58 58 checked 3 changesets with 3 changes to 3 files
59 59
60 60 $ rm .hg/store/fncache
61 61
62 62 $ hg verify
63 63 checking changesets
64 64 checking manifests
65 65 crosschecking files in changesets and manifests
66 66 checking files
67 67 warning: revlog 'data/a.i' not in fncache!
68 68 warning: revlog 'data/a.i.hg/c.i' not in fncache!
69 69 warning: revlog 'data/a.i/b.i' not in fncache!
70 70 checked 3 changesets with 3 changes to 3 files
71 71 3 warnings encountered!
72 72 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
73 73
74 74 Follow the hint to make sure it works
75 75
76 76 $ hg debugrebuildfncache
77 77 adding data/a.i
78 78 adding data/a.i.hg/c.i
79 79 adding data/a.i/b.i
80 80 3 items added, 0 removed from fncache
81 81
82 82 $ hg verify
83 83 checking changesets
84 84 checking manifests
85 85 crosschecking files in changesets and manifests
86 86 checking files
87 87 checked 3 changesets with 3 changes to 3 files
88 88
89 89 $ cd ..
90 90
91 91 Non store repo:
92 92
93 93 $ hg --config format.usestore=False init foo
94 94 $ cd foo
95 95 $ mkdir tst.d
96 96 $ echo foo > tst.d/foo
97 97 $ hg ci -Amfoo
98 98 adding tst.d/foo
99 99 $ find .hg | sort
100 100 .hg
101 101 .hg/00changelog.i
102 102 .hg/00manifest.i
103 103 .hg/cache
104 104 .hg/cache/branch2-served
105 105 .hg/cache/rbc-names-v1
106 106 .hg/cache/rbc-revs-v1
107 107 .hg/data
108 108 .hg/data/tst.d.hg
109 109 .hg/data/tst.d.hg/foo.i
110 110 .hg/dirstate
111 111 .hg/fsmonitor.state (fsmonitor !)
112 112 .hg/last-message.txt
113 113 .hg/phaseroots
114 114 .hg/requires
115 115 .hg/undo
116 116 .hg/undo.backup.dirstate
117 117 .hg/undo.backupfiles
118 118 .hg/undo.bookmarks
119 119 .hg/undo.branch
120 120 .hg/undo.desc
121 121 .hg/undo.dirstate
122 122 .hg/undo.phaseroots
123 123 .hg/wcache
124 124 .hg/wcache/checkisexec (execbit !)
125 125 .hg/wcache/checklink (symlink !)
126 126 .hg/wcache/checklink-target (symlink !)
127 127 .hg/wcache/manifestfulltextcache (reporevlogstore !)
128 128 $ cd ..
129 129
130 130 Non fncache repo:
131 131
132 132 $ hg --config format.usefncache=False init bar
133 133 $ cd bar
134 134 $ mkdir tst.d
135 135 $ echo foo > tst.d/Foo
136 136 $ hg ci -Amfoo
137 137 adding tst.d/Foo
138 138 $ find .hg | sort
139 139 .hg
140 140 .hg/00changelog.i
141 141 .hg/cache
142 142 .hg/cache/branch2-served
143 143 .hg/cache/rbc-names-v1
144 144 .hg/cache/rbc-revs-v1
145 145 .hg/dirstate
146 146 .hg/fsmonitor.state (fsmonitor !)
147 147 .hg/last-message.txt
148 148 .hg/requires
149 149 .hg/store
150 150 .hg/store/00changelog.i
151 151 .hg/store/00manifest.i
152 152 .hg/store/data
153 153 .hg/store/data/tst.d.hg
154 154 .hg/store/data/tst.d.hg/_foo.i
155 155 .hg/store/phaseroots
156 156 .hg/store/undo
157 157 .hg/store/undo.backupfiles
158 158 .hg/store/undo.phaseroots
159 159 .hg/undo.backup.dirstate
160 160 .hg/undo.bookmarks
161 161 .hg/undo.branch
162 162 .hg/undo.desc
163 163 .hg/undo.dirstate
164 164 .hg/wcache
165 165 .hg/wcache/checkisexec (execbit !)
166 166 .hg/wcache/checklink (symlink !)
167 167 .hg/wcache/checklink-target (symlink !)
168 168 .hg/wcache/manifestfulltextcache (reporevlogstore !)
169 169 $ cd ..
170 170
171 171 Encoding of reserved / long paths in the store
172 172
173 173 $ hg init r2
174 174 $ cd r2
175 175 $ cat <<EOF > .hg/hgrc
176 176 > [ui]
177 177 > portablefilenames = ignore
178 178 > EOF
179 179
180 180 $ hg import -q --bypass - <<EOF
181 181 > # HG changeset patch
182 182 > # User test
183 183 > # Date 0 0
184 184 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
185 185 > # Parent 0000000000000000000000000000000000000000
186 186 > 1
187 187 >
188 188 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
189 189 > new file mode 100644
190 190 > --- /dev/null
191 191 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
192 192 > @@ -0,0 +1,1 @@
193 193 > +foo
194 194 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
195 195 > new file mode 100644
196 196 > --- /dev/null
197 197 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
198 198 > @@ -0,0 +1,1 @@
199 199 > +foo
200 200 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
201 201 > new file mode 100644
202 202 > --- /dev/null
203 203 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
204 204 > @@ -0,0 +1,1 @@
205 205 > +foo
206 206 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
207 207 > new file mode 100644
208 208 > --- /dev/null
209 209 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
210 210 > @@ -0,0 +1,1 @@
211 211 > +foo
212 212 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
213 213 > new file mode 100644
214 214 > --- /dev/null
215 215 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
216 216 > @@ -0,0 +1,1 @@
217 217 > +foo
218 218 > EOF
219 219
220 220 $ find .hg/store -name *.i | sort
221 221 .hg/store/00changelog.i
222 222 .hg/store/00manifest.i
223 223 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
224 224 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
225 225 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
226 226 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
227 227 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
228 228
229 229 $ cd ..
230 230
231 231 Aborting lock does not prevent fncache writes
232 232
233 233 $ cat > exceptionext.py <<EOF
234 234 > from __future__ import absolute_import
235 235 > import os
236 236 > from mercurial import commands, error, extensions
237 237 >
238 238 > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
239 239 > def releasewrap():
240 240 > l.held = False # ensure __del__ is a noop
241 > raise error.Abort("forced lock failure")
241 > raise error.Abort(b"forced lock failure")
242 242 > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs)
243 243 > return l
244 244 >
245 245 > def reposetup(ui, repo):
246 246 > extensions.wrapfunction(repo, '_lock', lockexception)
247 247 >
248 248 > cmdtable = {}
249 249 >
250 250 > # wrap "commit" command to prevent wlock from being '__del__()'-ed
251 251 > # at the end of dispatching (for intentional "forced lcok failure")
252 252 > def commitwrap(orig, ui, repo, *pats, **opts):
253 253 > repo = repo.unfiltered() # to use replaced repo._lock certainly
254 254 > wlock = repo.wlock()
255 255 > try:
256 256 > return orig(ui, repo, *pats, **opts)
257 257 > finally:
258 258 > # multiple 'relase()' is needed for complete releasing wlock,
259 259 > # because "forced" abort at last releasing store lock
260 260 > # prevents wlock from being released at same 'lockmod.release()'
261 261 > for i in range(wlock.held):
262 262 > wlock.release()
263 263 >
264 264 > def extsetup(ui):
265 265 > extensions.wrapcommand(commands.table, b"commit", commitwrap)
266 266 > EOF
267 267 $ extpath=`pwd`/exceptionext.py
268 268 $ hg init fncachetxn
269 269 $ cd fncachetxn
270 270 $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc
271 271 $ touch y
272 272 $ hg ci -qAm y
273 273 abort: forced lock failure
274 274 [255]
275 275 $ cat .hg/store/fncache
276 276 data/y.i
277 277
278 278 Aborting transaction prevents fncache change
279 279
280 280 $ cat > ../exceptionext.py <<EOF
281 281 > from __future__ import absolute_import
282 282 > import os
283 283 > from mercurial import commands, error, extensions, localrepo
284 284 >
285 285 > def wrapper(orig, self, *args, **kwargs):
286 286 > tr = orig(self, *args, **kwargs)
287 287 > def fail(tr):
288 288 > raise error.Abort(b"forced transaction failure")
289 289 > # zzz prefix to ensure it sorted after store.write
290 290 > tr.addfinalize(b'zzz-forcefails', fail)
291 291 > return tr
292 292 >
293 293 > def uisetup(ui):
294 294 > extensions.wrapfunction(
295 295 > localrepo.localrepository, b'transaction', wrapper)
296 296 >
297 297 > cmdtable = {}
298 298 >
299 299 > EOF
300 300
301 301 Clean cached version
302 302 $ rm -f "${extpath}c"
303 303 $ rm -Rf "`dirname $extpath`/__pycache__"
304 304
305 305 $ touch z
306 306 $ hg ci -qAm z
307 307 transaction abort!
308 308 rollback completed
309 309 abort: forced transaction failure
310 310 [255]
311 311 $ cat .hg/store/fncache
312 312 data/y.i
313 313
314 314 Aborted transactions can be recovered later
315 315
316 316 $ cat > ../exceptionext.py <<EOF
317 317 > from __future__ import absolute_import
318 318 > import os
319 319 > from mercurial import (
320 320 > commands,
321 321 > error,
322 322 > extensions,
323 323 > localrepo,
324 324 > transaction,
325 325 > )
326 326 >
327 327 > def trwrapper(orig, self, *args, **kwargs):
328 328 > tr = orig(self, *args, **kwargs)
329 329 > def fail(tr):
330 330 > raise error.Abort(b"forced transaction failure")
331 331 > # zzz prefix to ensure it sorted after store.write
332 332 > tr.addfinalize(b'zzz-forcefails', fail)
333 333 > return tr
334 334 >
335 335 > def abortwrapper(orig, self, *args, **kwargs):
336 336 > raise error.Abort(b"forced transaction failure")
337 337 >
338 338 > def uisetup(ui):
339 339 > extensions.wrapfunction(localrepo.localrepository, 'transaction',
340 340 > trwrapper)
341 341 > extensions.wrapfunction(transaction.transaction, '_abort',
342 342 > abortwrapper)
343 343 >
344 344 > cmdtable = {}
345 345 >
346 346 > EOF
347 347
348 348 Clean cached versions
349 349 $ rm -f "${extpath}c"
350 350 $ rm -Rf "`dirname $extpath`/__pycache__"
351 351
352 352 $ hg up -q 1
353 353 $ touch z
354 354 $ hg ci -qAm z 2>/dev/null
355 355 [255]
356 356 $ cat .hg/store/fncache | sort
357 357 data/y.i
358 358 data/z.i
359 359 $ hg recover --verify
360 360 rolling back interrupted transaction
361 361 checking changesets
362 362 checking manifests
363 363 crosschecking files in changesets and manifests
364 364 checking files
365 365 checked 1 changesets with 1 changes to 1 files
366 366 $ cat .hg/store/fncache
367 367 data/y.i
368 368
369 369 $ cd ..
370 370
371 371 debugrebuildfncache does nothing unless repo has fncache requirement
372 372
373 373 $ hg --config format.usefncache=false init nofncache
374 374 $ cd nofncache
375 375 $ hg debugrebuildfncache
376 376 (not rebuilding fncache because repository does not support fncache)
377 377
378 378 $ cd ..
379 379
380 380 debugrebuildfncache works on empty repository
381 381
382 382 $ hg init empty
383 383 $ cd empty
384 384 $ hg debugrebuildfncache
385 385 fncache already up to date
386 386 $ cd ..
387 387
388 388 debugrebuildfncache on an up to date repository no-ops
389 389
390 390 $ hg init repo
391 391 $ cd repo
392 392 $ echo initial > foo
393 393 $ echo initial > .bar
394 394 $ hg commit -A -m initial
395 395 adding .bar
396 396 adding foo
397 397
398 398 $ cat .hg/store/fncache | sort
399 399 data/.bar.i
400 400 data/foo.i
401 401
402 402 $ hg debugrebuildfncache
403 403 fncache already up to date
404 404
405 405 debugrebuildfncache restores deleted fncache file
406 406
407 407 $ rm -f .hg/store/fncache
408 408 $ hg debugrebuildfncache
409 409 adding data/.bar.i
410 410 adding data/foo.i
411 411 2 items added, 0 removed from fncache
412 412
413 413 $ cat .hg/store/fncache | sort
414 414 data/.bar.i
415 415 data/foo.i
416 416
417 417 Rebuild after rebuild should no-op
418 418
419 419 $ hg debugrebuildfncache
420 420 fncache already up to date
421 421
422 422 A single missing file should get restored, an extra file should be removed
423 423
424 424 $ cat > .hg/store/fncache << EOF
425 425 > data/foo.i
426 426 > data/bad-entry.i
427 427 > EOF
428 428
429 429 $ hg debugrebuildfncache
430 430 removing data/bad-entry.i
431 431 adding data/.bar.i
432 432 1 items added, 1 removed from fncache
433 433
434 434 $ cat .hg/store/fncache | sort
435 435 data/.bar.i
436 436 data/foo.i
437 437
438 438 debugrebuildfncache recovers from truncated line in fncache
439 439
440 440 $ printf a > .hg/store/fncache
441 441 $ hg debugrebuildfncache
442 442 fncache does not ends with a newline
443 443 adding data/.bar.i
444 444 adding data/foo.i
445 445 2 items added, 0 removed from fncache
446 446
447 447 $ cat .hg/store/fncache | sort
448 448 data/.bar.i
449 449 data/foo.i
450 450
451 451 $ cd ..
452 452
453 453 Try a simple variation without dotencode to ensure fncache is ignorant of encoding
454 454
455 455 $ hg --config format.dotencode=false init nodotencode
456 456 $ cd nodotencode
457 457 $ echo initial > foo
458 458 $ echo initial > .bar
459 459 $ hg commit -A -m initial
460 460 adding .bar
461 461 adding foo
462 462
463 463 $ cat .hg/store/fncache | sort
464 464 data/.bar.i
465 465 data/foo.i
466 466
467 467 $ rm .hg/store/fncache
468 468 $ hg debugrebuildfncache
469 469 adding data/.bar.i
470 470 adding data/foo.i
471 471 2 items added, 0 removed from fncache
472 472
473 473 $ cat .hg/store/fncache | sort
474 474 data/.bar.i
475 475 data/foo.i
476 476
477 477 $ cd ..
478 478
479 479 In repositories that have accumulated a large number of files over time, the
480 480 fncache file is going to be large. If we possibly can avoid loading it, so much the better.
481 481 The cache should not loaded when committing changes to existing files, or when unbundling
482 482 changesets that only contain changes to existing files:
483 483
484 484 $ cat > fncacheloadwarn.py << EOF
485 485 > from __future__ import absolute_import
486 486 > from mercurial import extensions, localrepo
487 487 >
488 488 > def extsetup(ui):
489 489 > def wrapstore(orig, requirements, *args):
490 490 > store = orig(requirements, *args)
491 491 > if b'store' in requirements and b'fncache' in requirements:
492 492 > instrumentfncachestore(store, ui)
493 493 > return store
494 494 > extensions.wrapfunction(localrepo, 'makestore', wrapstore)
495 495 >
496 496 > def instrumentfncachestore(fncachestore, ui):
497 497 > class instrumentedfncache(type(fncachestore.fncache)):
498 498 > def _load(self):
499 499 > ui.warn(b'fncache load triggered!\n')
500 500 > super(instrumentedfncache, self)._load()
501 501 > fncachestore.fncache.__class__ = instrumentedfncache
502 502 > EOF
503 503
504 504 $ fncachextpath=`pwd`/fncacheloadwarn.py
505 505 $ hg init nofncacheload
506 506 $ cd nofncacheload
507 507 $ printf "[extensions]\nfncacheloadwarn=$fncachextpath\n" >> .hg/hgrc
508 508
509 509 A new file should trigger a load, as we'd want to update the fncache set in that case:
510 510
511 511 $ touch foo
512 512 $ hg ci -qAm foo
513 513 fncache load triggered!
514 514
515 515 But modifying that file should not:
516 516
517 517 $ echo bar >> foo
518 518 $ hg ci -qm foo
519 519
520 520 If a transaction has been aborted, the zero-size truncated index file will
521 521 not prevent the fncache from being loaded; rather than actually abort
522 522 a transaction, we simulate the situation by creating a zero-size index file:
523 523
524 524 $ touch .hg/store/data/bar.i
525 525 $ touch bar
526 526 $ hg ci -qAm bar
527 527 fncache load triggered!
528 528
529 529 Unbundling should follow the same rules; existing files should not cause a load:
530 530
531 531 $ hg clone -q . tobundle
532 532 $ echo 'new line' > tobundle/bar
533 533 $ hg -R tobundle ci -qm bar
534 534 $ hg -R tobundle bundle -q barupdated.hg
535 535 $ hg unbundle -q barupdated.hg
536 536
537 537 but adding new files should:
538 538
539 539 $ touch tobundle/newfile
540 540 $ hg -R tobundle ci -qAm newfile
541 541 $ hg -R tobundle bundle -q newfile.hg
542 542 $ hg unbundle -q newfile.hg
543 543 fncache load triggered!
544 544
545 545 $ cd ..
@@ -1,556 +1,556 b''
1 1 $ . "$TESTDIR/histedit-helpers.sh"
2 2
3 3 $ cat >> $HGRCPATH <<EOF
4 4 > [extensions]
5 5 > histedit=
6 6 > strip=
7 7 > mockmakedate = $TESTDIR/mockmakedate.py
8 8 > EOF
9 9
10 10 $ initrepo ()
11 11 > {
12 12 > hg init r
13 13 > cd r
14 14 > for x in a b c d e f g; do
15 15 > echo $x > $x
16 16 > hg add $x
17 17 > hg ci -m $x
18 18 > done
19 19 > }
20 20
21 21 $ initrepo
22 22
23 23 log before edit
24 24 $ hg log --graph
25 25 @ changeset: 6:3c6a8ed2ebe8
26 26 | tag: tip
27 27 | user: test
28 28 | date: Thu Jan 01 00:00:00 1970 +0000
29 29 | summary: g
30 30 |
31 31 o changeset: 5:652413bf663e
32 32 | user: test
33 33 | date: Thu Jan 01 00:00:00 1970 +0000
34 34 | summary: f
35 35 |
36 36 o changeset: 4:e860deea161a
37 37 | user: test
38 38 | date: Thu Jan 01 00:00:00 1970 +0000
39 39 | summary: e
40 40 |
41 41 o changeset: 3:055a42cdd887
42 42 | user: test
43 43 | date: Thu Jan 01 00:00:00 1970 +0000
44 44 | summary: d
45 45 |
46 46 o changeset: 2:177f92b77385
47 47 | user: test
48 48 | date: Thu Jan 01 00:00:00 1970 +0000
49 49 | summary: c
50 50 |
51 51 o changeset: 1:d2ae7f538514
52 52 | user: test
53 53 | date: Thu Jan 01 00:00:00 1970 +0000
54 54 | summary: b
55 55 |
56 56 o changeset: 0:cb9a9f314b8b
57 57 user: test
58 58 date: Thu Jan 01 00:00:00 1970 +0000
59 59 summary: a
60 60
61 61 dirty a file
62 62 $ echo a > g
63 63 $ hg histedit 177f92b77385 --commands - 2>&1 << EOF
64 64 > EOF
65 65 abort: uncommitted changes
66 66 [255]
67 67 $ echo g > g
68 68
69 69 edit the history
70 70 $ hg histedit 177f92b77385 --commands - 2>&1 << EOF| fixbundle
71 71 > pick 177f92b77385 c
72 72 > pick 055a42cdd887 d
73 73 > edit e860deea161a e
74 74 > pick 652413bf663e f
75 75 > pick 3c6a8ed2ebe8 g
76 76 > EOF
77 77 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
78 78 Editing (e860deea161a), you may commit or record as needed now.
79 79 (hg histedit --continue to resume)
80 80
81 81 try to update and get an error
82 82 $ hg update tip
83 83 abort: histedit in progress
84 84 (use 'hg histedit --continue' or 'hg histedit --abort')
85 85 [255]
86 86
87 87 edit the plan via the editor
88 88 $ cat >> $TESTTMP/editplan.sh <<EOF
89 89 > cat > \$1 <<EOF2
90 90 > drop e860deea161a e
91 91 > drop 652413bf663e f
92 92 > drop 3c6a8ed2ebe8 g
93 93 > EOF2
94 94 > EOF
95 95 $ HGEDITOR="sh $TESTTMP/editplan.sh" hg histedit --edit-plan
96 96 $ cat .hg/histedit-state
97 97 v1
98 98 055a42cdd88768532f9cf79daa407fc8d138de9b
99 99 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
100 100 False
101 101 3
102 102 drop
103 103 e860deea161a2f77de56603b340ebbb4536308ae
104 104 drop
105 105 652413bf663ef2a641cab26574e46d5f5a64a55a
106 106 drop
107 107 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
108 108 0
109 109 strip-backup/177f92b77385-0ebe6a8f-histedit.hg
110 110
111 111 edit the plan via --commands
112 112 $ hg histedit --edit-plan --commands - 2>&1 << EOF
113 113 > edit e860deea161a e
114 114 > pick 652413bf663e f
115 115 > drop 3c6a8ed2ebe8 g
116 116 > EOF
117 117 $ cat .hg/histedit-state
118 118 v1
119 119 055a42cdd88768532f9cf79daa407fc8d138de9b
120 120 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
121 121 False
122 122 3
123 123 edit
124 124 e860deea161a2f77de56603b340ebbb4536308ae
125 125 pick
126 126 652413bf663ef2a641cab26574e46d5f5a64a55a
127 127 drop
128 128 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
129 129 0
130 130 strip-backup/177f92b77385-0ebe6a8f-histedit.hg
131 131
132 132 Go at a random point and try to continue
133 133
134 134 $ hg id -n
135 135 3+
136 136 $ hg up 0
137 137 abort: histedit in progress
138 138 (use 'hg histedit --continue' or 'hg histedit --abort')
139 139 [255]
140 140
141 141 Try to delete necessary commit
142 142 $ hg strip -r 652413b
143 143 abort: histedit in progress, can't strip 652413bf663e
144 144 [255]
145 145
146 146 commit, then edit the revision
147 147 $ hg ci -m 'wat'
148 148 created new head
149 149 $ echo a > e
150 150
151 151 qnew should fail while we're in the middle of the edit step
152 152
153 153 $ hg --config extensions.mq= qnew please-fail
154 154 abort: histedit in progress
155 155 (use 'hg histedit --continue' or 'hg histedit --abort')
156 156 [255]
157 157 $ HGEDITOR='echo foobaz > ' hg histedit --continue 2>&1 | fixbundle
158 158
159 159 $ hg log --graph
160 160 @ changeset: 6:b5f70786f9b0
161 161 | tag: tip
162 162 | user: test
163 163 | date: Thu Jan 01 00:00:00 1970 +0000
164 164 | summary: f
165 165 |
166 166 o changeset: 5:a5e1ba2f7afb
167 167 | user: test
168 168 | date: Thu Jan 01 00:00:00 1970 +0000
169 169 | summary: foobaz
170 170 |
171 171 o changeset: 4:1a60820cd1f6
172 172 | user: test
173 173 | date: Thu Jan 01 00:00:00 1970 +0000
174 174 | summary: wat
175 175 |
176 176 o changeset: 3:055a42cdd887
177 177 | user: test
178 178 | date: Thu Jan 01 00:00:00 1970 +0000
179 179 | summary: d
180 180 |
181 181 o changeset: 2:177f92b77385
182 182 | user: test
183 183 | date: Thu Jan 01 00:00:00 1970 +0000
184 184 | summary: c
185 185 |
186 186 o changeset: 1:d2ae7f538514
187 187 | user: test
188 188 | date: Thu Jan 01 00:00:00 1970 +0000
189 189 | summary: b
190 190 |
191 191 o changeset: 0:cb9a9f314b8b
192 192 user: test
193 193 date: Thu Jan 01 00:00:00 1970 +0000
194 194 summary: a
195 195
196 196
197 197 $ hg cat e
198 198 a
199 199
200 200 Stripping necessary commits should not break --abort
201 201
202 202 $ hg histedit 1a60820cd1f6 --commands - 2>&1 << EOF| fixbundle
203 203 > edit 1a60820cd1f6 wat
204 204 > pick a5e1ba2f7afb foobaz
205 205 > pick b5f70786f9b0 g
206 206 > EOF
207 207 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
208 208 Editing (1a60820cd1f6), you may commit or record as needed now.
209 209 (hg histedit --continue to resume)
210 210
211 211 $ mv .hg/histedit-state .hg/histedit-state.bak
212 212 $ hg strip -q -r b5f70786f9b0
213 213 $ mv .hg/histedit-state.bak .hg/histedit-state
214 214 $ hg histedit --abort
215 215 adding changesets
216 216 adding manifests
217 217 adding file changes
218 218 added 1 changesets with 1 changes to 3 files
219 219 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
220 220 $ hg log -r .
221 221 changeset: 6:b5f70786f9b0
222 222 tag: tip
223 223 user: test
224 224 date: Thu Jan 01 00:00:00 1970 +0000
225 225 summary: f
226 226
227 227
228 228 check histedit_source
229 229
230 230 $ hg log --debug --rev 5
231 231 changeset: 5:a5e1ba2f7afb899ef1581cea528fd885d2fca70d
232 232 phase: draft
233 233 parent: 4:1a60820cd1f6004a362aa622ebc47d59bc48eb34
234 234 parent: -1:0000000000000000000000000000000000000000
235 235 manifest: 5:5ad3be8791f39117565557781f5464363b918a45
236 236 user: test
237 237 date: Thu Jan 01 00:00:00 1970 +0000
238 238 files: e
239 239 extra: branch=default
240 240 extra: histedit_source=e860deea161a2f77de56603b340ebbb4536308ae
241 241 description:
242 242 foobaz
243 243
244 244
245 245
246 246 $ hg histedit tip --commands - 2>&1 <<EOF| fixbundle
247 247 > edit b5f70786f9b0 f
248 248 > EOF
249 249 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
250 250 Editing (b5f70786f9b0), you may commit or record as needed now.
251 251 (hg histedit --continue to resume)
252 252 $ hg status
253 253 A f
254 254
255 255 $ hg summary
256 256 parent: 5:a5e1ba2f7afb
257 257 foobaz
258 258 branch: default
259 259 commit: 1 added (new branch head)
260 260 update: 1 new changesets (update)
261 261 phases: 7 draft
262 262 hist: 1 remaining (histedit --continue)
263 263
264 264 (test also that editor is invoked if histedit is continued for
265 265 "edit" action)
266 266
267 267 $ HGEDITOR='cat' hg histedit --continue
268 268 f
269 269
270 270
271 271 HG: Enter commit message. Lines beginning with 'HG:' are removed.
272 272 HG: Leave message empty to abort commit.
273 273 HG: --
274 274 HG: user: test
275 275 HG: branch 'default'
276 276 HG: added f
277 277 saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-c28d9c86-histedit.hg
278 278
279 279 $ hg status
280 280
281 281 log after edit
282 282 $ hg log --limit 1
283 283 changeset: 6:a107ee126658
284 284 tag: tip
285 285 user: test
286 286 date: Thu Jan 01 00:00:00 1970 +0000
287 287 summary: f
288 288
289 289
290 290 say we'll change the message, but don't.
291 291 $ cat > ../edit.sh <<EOF
292 292 > cat "\$1" | sed s/pick/mess/ > tmp
293 293 > mv tmp "\$1"
294 294 > EOF
295 295 $ HGEDITOR="sh ../edit.sh" hg histedit tip 2>&1 | fixbundle
296 296 $ hg status
297 297 $ hg log --limit 1
298 298 changeset: 6:1fd3b2fe7754
299 299 tag: tip
300 300 user: test
301 301 date: Thu Jan 01 00:00:00 1970 +0000
302 302 summary: f
303 303
304 304
305 305 modify the message
306 306
307 307 check saving last-message.txt, at first
308 308
309 309 $ cat > $TESTTMP/commitfailure.py <<EOF
310 310 > from mercurial import error
311 311 > def reposetup(ui, repo):
312 312 > class commitfailure(repo.__class__):
313 313 > def commit(self, *args, **kwargs):
314 > raise error.Abort('emulating unexpected abort')
314 > raise error.Abort(b'emulating unexpected abort')
315 315 > repo.__class__ = commitfailure
316 316 > EOF
317 317 $ cat >> .hg/hgrc <<EOF
318 318 > [extensions]
319 319 > # this failure occurs before editor invocation
320 320 > commitfailure = $TESTTMP/commitfailure.py
321 321 > EOF
322 322
323 323 $ cat > $TESTTMP/editor.sh <<EOF
324 324 > echo "==== before editing"
325 325 > cat \$1
326 326 > echo "===="
327 327 > echo "check saving last-message.txt" >> \$1
328 328 > EOF
329 329
330 330 (test that editor is not invoked before transaction starting)
331 331
332 332 $ rm -f .hg/last-message.txt
333 333 $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit tip --commands - 2>&1 << EOF | fixbundle
334 334 > mess 1fd3b2fe7754 f
335 335 > EOF
336 336 abort: emulating unexpected abort
337 337 $ test -f .hg/last-message.txt
338 338 [1]
339 339
340 340 $ cat >> .hg/hgrc <<EOF
341 341 > [extensions]
342 342 > commitfailure = !
343 343 > EOF
344 344 $ hg histedit --abort -q
345 345
346 346 (test that editor is invoked and commit message is saved into
347 347 "last-message.txt")
348 348
349 349 $ cat >> .hg/hgrc <<EOF
350 350 > [hooks]
351 351 > # this failure occurs after editor invocation
352 352 > pretxncommit.unexpectedabort = false
353 353 > EOF
354 354
355 355 $ hg status --rev '1fd3b2fe7754^1' --rev 1fd3b2fe7754
356 356 A f
357 357
358 358 $ rm -f .hg/last-message.txt
359 359 $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit tip --commands - 2>&1 << EOF
360 360 > mess 1fd3b2fe7754 f
361 361 > EOF
362 362 ==== before editing
363 363 f
364 364
365 365
366 366 HG: Enter commit message. Lines beginning with 'HG:' are removed.
367 367 HG: Leave message empty to abort commit.
368 368 HG: --
369 369 HG: user: test
370 370 HG: branch 'default'
371 371 HG: added f
372 372 ====
373 373 transaction abort!
374 374 rollback completed
375 375 note: commit message saved in .hg/last-message.txt
376 376 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
377 377 abort: pretxncommit.unexpectedabort hook exited with status 1
378 378 [255]
379 379 $ cat .hg/last-message.txt
380 380 f
381 381
382 382
383 383 check saving last-message.txt
384 384
385 385 (test also that editor is invoked if histedit is continued for "message"
386 386 action)
387 387
388 388 $ HGEDITOR=cat hg histedit --continue
389 389 f
390 390
391 391
392 392 HG: Enter commit message. Lines beginning with 'HG:' are removed.
393 393 HG: Leave message empty to abort commit.
394 394 HG: --
395 395 HG: user: test
396 396 HG: branch 'default'
397 397 HG: added f
398 398 transaction abort!
399 399 rollback completed
400 400 note: commit message saved in .hg/last-message.txt
401 401 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
402 402 abort: pretxncommit.unexpectedabort hook exited with status 1
403 403 [255]
404 404
405 405 $ cat >> .hg/hgrc <<EOF
406 406 > [hooks]
407 407 > pretxncommit.unexpectedabort =
408 408 > EOF
409 409 $ hg histedit --abort -q
410 410
411 411 then, check "modify the message" itself
412 412
413 413 $ hg histedit tip --commands - 2>&1 << EOF | fixbundle
414 414 > mess 1fd3b2fe7754 f
415 415 > EOF
416 416 $ hg status
417 417 $ hg log --limit 1
418 418 changeset: 6:62feedb1200e
419 419 tag: tip
420 420 user: test
421 421 date: Thu Jan 01 00:00:00 1970 +0000
422 422 summary: f
423 423
424 424
425 425 rollback should not work after a histedit
426 426 $ hg rollback
427 427 no rollback information available
428 428 [1]
429 429
430 430 $ cd ..
431 431 $ hg clone -qr0 r r0
432 432 $ cd r0
433 433 $ hg phase -fdr0
434 434 $ hg histedit --commands - 0 2>&1 << EOF
435 435 > edit cb9a9f314b8b a > $EDITED
436 436 > EOF
437 437 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
438 438 Editing (cb9a9f314b8b), you may commit or record as needed now.
439 439 (hg histedit --continue to resume)
440 440 [1]
441 441 $ HGEDITOR=true hg histedit --continue
442 442 saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-histedit.hg
443 443
444 444 $ hg log -G
445 445 @ changeset: 0:0efcea34f18a
446 446 tag: tip
447 447 user: test
448 448 date: Thu Jan 01 00:00:00 1970 +0000
449 449 summary: a
450 450
451 451 $ echo foo >> b
452 452 $ hg addr
453 453 adding b
454 454 $ hg ci -m 'add b'
455 455 $ echo foo >> a
456 456 $ hg ci -m 'extend a'
457 457 $ hg phase --public 1
458 458 Attempting to fold a change into a public change should not work:
459 459 $ cat > ../edit.sh <<EOF
460 460 > cat "\$1" | sed s/pick/fold/ > tmp
461 461 > mv tmp "\$1"
462 462 > EOF
463 463 $ HGEDITOR="sh ../edit.sh" hg histedit 2
464 464 warning: histedit rules saved to: .hg/histedit-last-edit.txt
465 465 hg: parse error: first changeset cannot use verb "fold"
466 466 [255]
467 467 $ cat .hg/histedit-last-edit.txt
468 468 fold 0012be4a27ea 2 extend a
469 469
470 470 # Edit history between 0012be4a27ea and 0012be4a27ea
471 471 #
472 472 # Commits are listed from least to most recent
473 473 #
474 474 # You can reorder changesets by reordering the lines
475 475 #
476 476 # Commands:
477 477 #
478 478 # e, edit = use commit, but stop for amending
479 479 # m, mess = edit commit message without changing commit content
480 480 # p, fold = use commit
481 481 # b, base = checkout changeset and apply further changesets from there
482 482 # d, drop = remove commit from history
483 483 # f, fold = use commit, but combine it with the one above
484 484 # r, roll = like fold, but discard this commit's description and date
485 485 #
486 486
487 487 $ cd ..
488 488
489 489 ============================================
490 490 Test update-timestamp config option in mess|
491 491 ============================================
492 492
493 493 $ addwithdate ()
494 494 > {
495 495 > echo $1 > $1
496 496 > hg add $1
497 497 > hg ci -m $1 -d "$2 0"
498 498 > }
499 499
500 500 $ initrepo ()
501 501 > {
502 502 > hg init r2
503 503 > cd r2
504 504 > addwithdate a 1
505 505 > addwithdate b 2
506 506 > addwithdate c 3
507 507 > addwithdate d 4
508 508 > addwithdate e 5
509 509 > addwithdate f 6
510 510 > }
511 511
512 512 $ initrepo
513 513
514 514 log before edit
515 515
516 516 $ hg log --limit 1
517 517 changeset: 5:178e35e0ce73
518 518 tag: tip
519 519 user: test
520 520 date: Thu Jan 01 00:00:06 1970 +0000
521 521 summary: f
522 522
523 523 $ hg histedit tip --commands - 2>&1 --config rewrite.update-timestamp=True << EOF | fixbundle
524 524 > mess 178e35e0ce73 f
525 525 > EOF
526 526
527 527 log after edit
528 528
529 529 $ hg log --limit 1
530 530 changeset: 5:98bf456d476b
531 531 tag: tip
532 532 user: test
533 533 date: Thu Jan 01 00:00:00 1970 +0000
534 534 summary: f
535 535
536 536
537 537 $ cd ..
538 538
539 539 warn the user on editing tagged commits
540 540
541 541 $ hg init issue4017
542 542 $ cd issue4017
543 543 $ echo > a
544 544 $ hg ci -Am 'add a'
545 545 adding a
546 546 $ hg tag a
547 547 $ hg tags
548 548 tip 1:bd7ee4f3939b
549 549 a 0:a8a82d372bb3
550 550 $ hg histedit
551 551 warning: tags associated with the given changeset will be lost after histedit.
552 552 do you want to continue (yN)? n
553 553 abort: histedit cancelled
554 554
555 555 [255]
556 556 $ cd ..
@@ -1,439 +1,439 b''
1 1 $ cat <<EOF > merge
2 2 > from __future__ import print_function
3 3 > import sys, os
4 4 >
5 5 > try:
6 6 > import msvcrt
7 7 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
8 8 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
9 9 > except ImportError:
10 10 > pass
11 11 >
12 12 > print("merging for", os.path.basename(sys.argv[1]))
13 13 > EOF
14 14 $ HGMERGE="\"$PYTHON\" ../merge"; export HGMERGE
15 15
16 16 $ hg init t
17 17 $ cd t
18 18 $ echo This is file a1 > a
19 19 $ hg add a
20 20 $ hg commit -m "commit #0"
21 21 $ echo This is file b1 > b
22 22 $ hg add b
23 23 $ hg commit -m "commit #1"
24 24
25 25 $ hg update 0
26 26 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
27 27
28 28 Test interrupted updates by having a non-empty dir with the same name as one
29 29 of the files in a commit we're updating to
30 30
31 31 $ mkdir b && touch b/nonempty
32 32 $ hg up
33 33 abort: Unlinking directory not permitted: *$TESTTMP/t/b* (glob) (windows !)
34 34 abort: Directory not empty: '?\$TESTTMP/t/b'? (re) (no-windows !)
35 35 [255]
36 36 $ hg ci
37 37 abort: last update was interrupted
38 38 (use 'hg update' to get a consistent checkout)
39 39 [255]
40 40 $ hg sum
41 41 parent: 0:538afb845929
42 42 commit #0
43 43 branch: default
44 44 commit: 1 unknown (interrupted update)
45 45 update: 1 new changesets (update)
46 46 phases: 2 draft
47 47 Detect interrupted update by hg status --verbose
48 48 $ hg status -v
49 49 ? b/nonempty
50 50 # The repository is in an unfinished *update* state.
51 51
52 52 # To continue: hg update .
53 53
54 54
55 55 $ rm b/nonempty
56 56
57 57 $ hg up
58 58 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
59 59 $ hg sum
60 60 parent: 1:b8bb4a988f25 tip
61 61 commit #1
62 62 branch: default
63 63 commit: (clean)
64 64 update: (current)
65 65 phases: 2 draft
66 66
67 67 Prepare a basic merge
68 68
69 69 $ hg up 0
70 70 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
71 71 $ echo This is file c1 > c
72 72 $ hg add c
73 73 $ hg commit -m "commit #2"
74 74 created new head
75 75 $ echo This is file b1 > b
76 76 no merges expected
77 77 $ hg merge -P 1
78 78 changeset: 1:b8bb4a988f25
79 79 user: test
80 80 date: Thu Jan 01 00:00:00 1970 +0000
81 81 summary: commit #1
82 82
83 83 $ hg merge 1
84 84 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
85 85 (branch merge, don't forget to commit)
86 86 $ hg diff --nodates
87 87 diff -r 49035e18a8e6 b
88 88 --- /dev/null
89 89 +++ b/b
90 90 @@ -0,0 +1,1 @@
91 91 +This is file b1
92 92 $ hg status
93 93 M b
94 94 $ cd ..; rm -r t
95 95
96 96 $ hg init t
97 97 $ cd t
98 98 $ echo This is file a1 > a
99 99 $ hg add a
100 100 $ hg commit -m "commit #0"
101 101 $ echo This is file b1 > b
102 102 $ hg add b
103 103 $ hg commit -m "commit #1"
104 104
105 105 $ hg update 0
106 106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
107 107 $ echo This is file c1 > c
108 108 $ hg add c
109 109 $ hg commit -m "commit #2"
110 110 created new head
111 111 $ echo This is file b2 > b
112 112 merge should fail
113 113 $ hg merge 1
114 114 b: untracked file differs
115 115 abort: untracked files in working directory differ from files in requested revision
116 116 [255]
117 117
118 118 #if symlink
119 119 symlinks to directories should be treated as regular files (issue5027)
120 120 $ rm b
121 121 $ ln -s 'This is file b2' b
122 122 $ hg merge 1
123 123 b: untracked file differs
124 124 abort: untracked files in working directory differ from files in requested revision
125 125 [255]
126 126 symlinks shouldn't be followed
127 127 $ rm b
128 128 $ echo This is file b1 > .hg/b
129 129 $ ln -s .hg/b b
130 130 $ hg merge 1
131 131 b: untracked file differs
132 132 abort: untracked files in working directory differ from files in requested revision
133 133 [255]
134 134
135 135 $ rm b
136 136 $ echo This is file b2 > b
137 137 #endif
138 138
139 139 bad config
140 140 $ hg merge 1 --config merge.checkunknown=x
141 141 abort: merge.checkunknown not valid ('x' is none of 'abort', 'ignore', 'warn')
142 142 [255]
143 143 this merge should fail
144 144 $ hg merge 1 --config merge.checkunknown=abort
145 145 b: untracked file differs
146 146 abort: untracked files in working directory differ from files in requested revision
147 147 [255]
148 148
149 149 this merge should warn
150 150 $ hg merge 1 --config merge.checkunknown=warn
151 151 b: replacing untracked file
152 152 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
153 153 (branch merge, don't forget to commit)
154 154 $ cat b.orig
155 155 This is file b2
156 156 $ hg up --clean 2
157 157 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
158 158 $ mv b.orig b
159 159
160 160 this merge should silently ignore
161 161 $ cat b
162 162 This is file b2
163 163 $ hg merge 1 --config merge.checkunknown=ignore
164 164 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
165 165 (branch merge, don't forget to commit)
166 166
167 167 merge.checkignored
168 168 $ hg up --clean 1
169 169 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
170 170 $ cat >> .hgignore << EOF
171 171 > remoteignored
172 172 > EOF
173 173 $ echo This is file localignored3 > localignored
174 174 $ echo This is file remoteignored3 > remoteignored
175 175 $ hg add .hgignore localignored remoteignored
176 176 $ hg commit -m "commit #3"
177 177
178 178 $ hg up 2
179 179 1 files updated, 0 files merged, 4 files removed, 0 files unresolved
180 180 $ cat >> .hgignore << EOF
181 181 > localignored
182 182 > EOF
183 183 $ hg add .hgignore
184 184 $ hg commit -m "commit #4"
185 185
186 186 remote .hgignore shouldn't be used for determining whether a file is ignored
187 187 $ echo This is file remoteignored4 > remoteignored
188 188 $ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort
189 189 remoteignored: untracked file differs
190 190 abort: untracked files in working directory differ from files in requested revision
191 191 [255]
192 192 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
193 193 merging .hgignore
194 194 merging for .hgignore
195 195 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
196 196 (branch merge, don't forget to commit)
197 197 $ cat remoteignored
198 198 This is file remoteignored3
199 199 $ cat remoteignored.orig
200 200 This is file remoteignored4
201 201 $ rm remoteignored.orig
202 202
203 203 local .hgignore should be used for that
204 204 $ hg up --clean 4
205 205 1 files updated, 0 files merged, 3 files removed, 0 files unresolved
206 206 $ echo This is file localignored4 > localignored
207 207 also test other conflicting files to see we output the full set of warnings
208 208 $ echo This is file b2 > b
209 209 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=abort
210 210 b: untracked file differs
211 211 localignored: untracked file differs
212 212 abort: untracked files in working directory differ from files in requested revision
213 213 [255]
214 214 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
215 215 localignored: untracked file differs
216 216 abort: untracked files in working directory differ from files in requested revision
217 217 [255]
218 218 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort
219 219 b: untracked file differs
220 220 abort: untracked files in working directory differ from files in requested revision
221 221 [255]
222 222 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn
223 223 b: replacing untracked file
224 224 localignored: replacing untracked file
225 225 merging .hgignore
226 226 merging for .hgignore
227 227 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
228 228 (branch merge, don't forget to commit)
229 229 $ cat localignored
230 230 This is file localignored3
231 231 $ cat localignored.orig
232 232 This is file localignored4
233 233 $ rm localignored.orig
234 234
235 235 $ cat b.orig
236 236 This is file b2
237 237 $ hg up --clean 2
238 238 0 files updated, 0 files merged, 4 files removed, 0 files unresolved
239 239 $ mv b.orig b
240 240
241 241 this merge of b should work
242 242 $ cat b
243 243 This is file b2
244 244 $ hg merge -f 1
245 245 merging b
246 246 merging for b
247 247 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
248 248 (branch merge, don't forget to commit)
249 249 $ hg diff --nodates
250 250 diff -r 49035e18a8e6 b
251 251 --- /dev/null
252 252 +++ b/b
253 253 @@ -0,0 +1,1 @@
254 254 +This is file b2
255 255 $ hg status
256 256 M b
257 257 $ cd ..; rm -r t
258 258
259 259 $ hg init t
260 260 $ cd t
261 261 $ echo This is file a1 > a
262 262 $ hg add a
263 263 $ hg commit -m "commit #0"
264 264 $ echo This is file b1 > b
265 265 $ hg add b
266 266 $ hg commit -m "commit #1"
267 267 $ echo This is file b22 > b
268 268 $ hg commit -m "commit #2"
269 269 $ hg update 1
270 270 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
271 271 $ echo This is file c1 > c
272 272 $ hg add c
273 273 $ hg commit -m "commit #3"
274 274 created new head
275 275
276 276 Contents of b should be "this is file b1"
277 277 $ cat b
278 278 This is file b1
279 279
280 280 $ echo This is file b22 > b
281 281 merge fails
282 282 $ hg merge 2
283 283 abort: uncommitted changes
284 284 (use 'hg status' to list changes)
285 285 [255]
286 286 merge expected!
287 287 $ hg merge -f 2
288 288 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
289 289 (branch merge, don't forget to commit)
290 290 $ hg diff --nodates
291 291 diff -r 85de557015a8 b
292 292 --- a/b
293 293 +++ b/b
294 294 @@ -1,1 +1,1 @@
295 295 -This is file b1
296 296 +This is file b22
297 297 $ hg status
298 298 M b
299 299 $ cd ..; rm -r t
300 300
301 301 $ hg init t
302 302 $ cd t
303 303 $ echo This is file a1 > a
304 304 $ hg add a
305 305 $ hg commit -m "commit #0"
306 306 $ echo This is file b1 > b
307 307 $ hg add b
308 308 $ hg commit -m "commit #1"
309 309 $ echo This is file b22 > b
310 310 $ hg commit -m "commit #2"
311 311 $ hg update 1
312 312 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
313 313 $ echo This is file c1 > c
314 314 $ hg add c
315 315 $ hg commit -m "commit #3"
316 316 created new head
317 317 $ echo This is file b33 > b
318 318 merge of b should fail
319 319 $ hg merge 2
320 320 abort: uncommitted changes
321 321 (use 'hg status' to list changes)
322 322 [255]
323 323 merge of b expected
324 324 $ hg merge -f 2
325 325 merging b
326 326 merging for b
327 327 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
328 328 (branch merge, don't forget to commit)
329 329 $ hg diff --nodates
330 330 diff -r 85de557015a8 b
331 331 --- a/b
332 332 +++ b/b
333 333 @@ -1,1 +1,1 @@
334 334 -This is file b1
335 335 +This is file b33
336 336 $ hg status
337 337 M b
338 338
339 339 Test for issue2364
340 340
341 341 $ hg up -qC .
342 342 $ hg rm b
343 343 $ hg ci -md
344 344 $ hg revert -r -2 b
345 345 $ hg up -q -- -2
346 346
347 347 Test that updated files are treated as "modified", when
348 348 'merge.update()' is aborted before 'merge.recordupdates()' (= parents
349 349 aren't changed), even if none of mode, size and timestamp of them
350 350 isn't changed on the filesystem (see also issue4583).
351 351
352 352 $ cat > $TESTTMP/abort.py <<EOF
353 353 > from __future__ import absolute_import
354 354 > # emulate aborting before "recordupdates()". in this case, files
355 355 > # are changed without updating dirstate
356 356 > from mercurial import (
357 357 > error,
358 358 > extensions,
359 359 > merge,
360 360 > )
361 361 > def applyupdates(orig, *args, **kwargs):
362 362 > orig(*args, **kwargs)
363 > raise error.Abort('intentional aborting')
363 > raise error.Abort(b'intentional aborting')
364 364 > def extsetup(ui):
365 365 > extensions.wrapfunction(merge, "applyupdates", applyupdates)
366 366 > EOF
367 367
368 368 $ cat >> .hg/hgrc <<EOF
369 369 > [fakedirstatewritetime]
370 370 > # emulate invoking dirstate.write() via repo.status()
371 371 > # at 2000-01-01 00:00
372 372 > fakenow = 200001010000
373 373 > EOF
374 374
375 375 (file gotten from other revision)
376 376
377 377 $ hg update -q -C 2
378 378 $ echo 'THIS IS FILE B5' > b
379 379 $ hg commit -m 'commit #5'
380 380
381 381 $ hg update -q -C 3
382 382 $ cat b
383 383 This is file b1
384 384 $ touch -t 200001010000 b
385 385 $ hg debugrebuildstate
386 386
387 387 $ cat >> .hg/hgrc <<EOF
388 388 > [extensions]
389 389 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
390 390 > abort = $TESTTMP/abort.py
391 391 > EOF
392 392 $ hg merge 5
393 393 abort: intentional aborting
394 394 [255]
395 395 $ cat >> .hg/hgrc <<EOF
396 396 > [extensions]
397 397 > fakedirstatewritetime = !
398 398 > abort = !
399 399 > EOF
400 400
401 401 $ cat b
402 402 THIS IS FILE B5
403 403 $ touch -t 200001010000 b
404 404 $ hg status -A b
405 405 M b
406 406
407 407 (file merged from other revision)
408 408
409 409 $ hg update -q -C 3
410 410 $ echo 'this is file b6' > b
411 411 $ hg commit -m 'commit #6'
412 412 created new head
413 413
414 414 $ cat b
415 415 this is file b6
416 416 $ touch -t 200001010000 b
417 417 $ hg debugrebuildstate
418 418
419 419 $ cat >> .hg/hgrc <<EOF
420 420 > [extensions]
421 421 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
422 422 > abort = $TESTTMP/abort.py
423 423 > EOF
424 424 $ hg merge --tool internal:other 5
425 425 abort: intentional aborting
426 426 [255]
427 427 $ cat >> .hg/hgrc <<EOF
428 428 > [extensions]
429 429 > fakedirstatewritetime = !
430 430 > abort = !
431 431 > EOF
432 432
433 433 $ cat b
434 434 THIS IS FILE B5
435 435 $ touch -t 200001010000 b
436 436 $ hg status -A b
437 437 M b
438 438
439 439 $ cd ..
@@ -1,264 +1,264 b''
1 1 $ cat <<EOF >> $HGRCPATH
2 2 > [extensions]
3 3 > mq =
4 4 > [mq]
5 5 > git = keep
6 6 > [diff]
7 7 > nodates = 1
8 8 > EOF
9 9
10 10 init:
11 11
12 12 $ hg init repo
13 13 $ cd repo
14 14 $ echo a > a
15 15 $ hg ci -Am adda
16 16 adding a
17 17 $ echo a >> a
18 18 $ hg qnew -f p1
19 19 $ echo b >> a
20 20 $ hg qnew -f p2
21 21 $ echo c >> a
22 22 $ hg qnew -f p3
23 23
24 24 Fold in the middle of the queue:
25 25 (this tests also that editor is not invoked if '--edit' is not
26 26 specified)
27 27
28 28 $ hg qpop p1
29 29 popping p3
30 30 popping p2
31 31 now at: p1
32 32
33 33 $ hg qdiff
34 34 diff -r 07f494440405 a
35 35 --- a/a
36 36 +++ b/a
37 37 @@ -1,1 +1,2 @@
38 38 a
39 39 +a
40 40
41 41 $ HGEDITOR=cat hg qfold p2
42 42 $ grep git .hg/patches/p1 && echo 'git patch found!'
43 43 [1]
44 44
45 45 $ hg qser
46 46 p1
47 47 p3
48 48
49 49 $ hg qdiff
50 50 diff -r 07f494440405 a
51 51 --- a/a
52 52 +++ b/a
53 53 @@ -1,1 +1,3 @@
54 54 a
55 55 +a
56 56 +b
57 57
58 58 Fold with local changes:
59 59
60 60 $ echo d >> a
61 61 $ hg qfold p3
62 62 abort: local changes found, qrefresh first
63 63 [255]
64 64
65 65 $ hg diff -c .
66 66 diff -r 07f494440405 -r ???????????? a (glob)
67 67 --- a/a
68 68 +++ b/a
69 69 @@ -1,1 +1,3 @@
70 70 a
71 71 +a
72 72 +b
73 73
74 74 $ hg revert -a --no-backup
75 75 reverting a
76 76
77 77 Fold git patch into a regular patch, expect git patch:
78 78
79 79 $ echo a >> a
80 80 $ hg qnew -f regular
81 81 $ hg cp a aa
82 82 $ hg qnew --git -f git
83 83
84 84 $ hg qpop
85 85 popping git
86 86 now at: regular
87 87
88 88 $ hg qfold git
89 89
90 90 $ cat .hg/patches/regular
91 91 # HG changeset patch
92 92 # Parent ???????????????????????????????????????? (glob)
93 93
94 94 diff --git a/a b/a
95 95 --- a/a
96 96 +++ b/a
97 97 @@ -1,3 +1,4 @@
98 98 a
99 99 a
100 100 b
101 101 +a
102 102 diff --git a/a b/aa
103 103 copy from a
104 104 copy to aa
105 105 --- a/a
106 106 +++ b/aa
107 107 @@ -1,3 +1,4 @@
108 108 a
109 109 a
110 110 b
111 111 +a
112 112
113 113 $ hg qpop
114 114 popping regular
115 115 now at: p1
116 116
117 117 $ hg qdel regular
118 118
119 119 Fold regular patch into a git patch, expect git patch:
120 120
121 121 $ hg cp a aa
122 122 $ hg qnew --git -f git
123 123 $ echo b >> aa
124 124 $ hg qnew -f regular
125 125
126 126 $ hg qpop
127 127 popping regular
128 128 now at: git
129 129
130 130 $ hg qfold regular
131 131
132 132 $ cat .hg/patches/git
133 133 # HG changeset patch
134 134 # Parent ???????????????????????????????????????? (glob)
135 135
136 136 diff --git a/a b/aa
137 137 copy from a
138 138 copy to aa
139 139 --- a/a
140 140 +++ b/aa
141 141 @@ -1,3 +1,4 @@
142 142 a
143 143 a
144 144 b
145 145 +b
146 146
147 147 Test saving last-message.txt:
148 148
149 149 $ hg qrefresh -m "original message"
150 150
151 151 $ cat > $TESTTMP/commitfailure.py <<EOF
152 152 > from mercurial import error
153 153 > def reposetup(ui, repo):
154 154 > class commitfailure(repo.__class__):
155 155 > def commit(self, *args, **kwargs):
156 > raise error.Abort('emulating unexpected abort')
156 > raise error.Abort(b'emulating unexpected abort')
157 157 > repo.__class__ = commitfailure
158 158 > EOF
159 159
160 160 $ cat >> .hg/hgrc <<EOF
161 161 > [extensions]
162 162 > # this failure occurs before editor invocation
163 163 > commitfailure = $TESTTMP/commitfailure.py
164 164 > EOF
165 165
166 166 $ cat > $TESTTMP/editor.sh << EOF
167 167 > echo "==== before editing"
168 168 > cat \$1
169 169 > echo "===="
170 170 > (echo; echo "test saving last-message.txt") >> \$1
171 171 > EOF
172 172
173 173 $ hg qapplied
174 174 p1
175 175 git
176 176 $ hg tip --template "{files}\n"
177 177 aa
178 178
179 179 (test that editor is not invoked before transaction starting,
180 180 and that combination of '--edit' and '--message' doesn't abort execution)
181 181
182 182 $ rm -f .hg/last-message.txt
183 183 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e -m MESSAGE p3
184 184 qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
185 185 abort: emulating unexpected abort
186 186 [255]
187 187 $ test -f .hg/last-message.txt
188 188 [1]
189 189
190 190 (reset applied patches and directory status)
191 191
192 192 $ cat >> .hg/hgrc <<EOF
193 193 > [extensions]
194 194 > # this failure occurs after editor invocation
195 195 > commitfailure = !
196 196 > EOF
197 197
198 198 $ hg qapplied
199 199 p1
200 200 $ hg status -A aa
201 201 ? aa
202 202 $ rm aa
203 203 $ hg status -m
204 204 M a
205 205 $ hg revert --no-backup -q a
206 206 $ hg qpush -q git
207 207 now at: git
208 208
209 209 (test that editor is invoked and commit message is saved into
210 210 "last-message.txt")
211 211
212 212 $ cat >> .hg/hgrc <<EOF
213 213 > [hooks]
214 214 > # this failure occurs after editor invocation
215 215 > pretxncommit.unexpectedabort = false
216 216 > EOF
217 217
218 218 $ rm -f .hg/last-message.txt
219 219 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e p3
220 220 ==== before editing
221 221 original message
222 222
223 223
224 224 HG: Enter commit message. Lines beginning with 'HG:' are removed.
225 225 HG: Leave message empty to use default message.
226 226 HG: --
227 227 HG: user: test
228 228 HG: branch 'default'
229 229 HG: added aa
230 230 HG: changed a
231 231 ====
232 232 note: commit message saved in .hg/last-message.txt
233 233 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
234 234 transaction abort!
235 235 rollback completed
236 236 qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
237 237 abort: pretxncommit.unexpectedabort hook exited with status 1
238 238 [255]
239 239 $ cat .hg/last-message.txt
240 240 original message
241 241
242 242
243 243
244 244 test saving last-message.txt
245 245
246 246 (confirm whether files listed up in the commit message editing are correct)
247 247
248 248 $ cat >> .hg/hgrc <<EOF
249 249 > [hooks]
250 250 > pretxncommit.unexpectedabort =
251 251 > EOF
252 252 $ hg status -u | while read f; do rm ${f}; done
253 253 $ hg revert --no-backup -q --all
254 254 $ hg qpush -q git
255 255 now at: git
256 256 $ hg qpush -q --move p3
257 257 now at: p3
258 258
259 259 $ hg status --rev "git^1" --rev . -arm
260 260 M a
261 261 A aa
262 262
263 263 $ cd ..
264 264
@@ -1,322 +1,322 b''
1 1
2 2 $ catpatch() {
3 3 > cat $1 | sed -e "s/^\(# Parent \).*/\1/"
4 4 > }
5 5 $ echo "[extensions]" >> $HGRCPATH
6 6 $ echo "mq=" >> $HGRCPATH
7 7 $ runtest() {
8 8 > hg init mq
9 9 > cd mq
10 10 >
11 11 > echo a > a
12 12 > hg ci -Ama
13 13 >
14 14 > echo '% qnew should refuse bad patch names'
15 15 > hg qnew series
16 16 > hg qnew status
17 17 > hg qnew guards
18 18 > hg qnew .
19 19 > hg qnew ..
20 20 > hg qnew .hgignore
21 21 > hg qnew .mqfoo
22 22 > hg qnew 'foo#bar'
23 23 > hg qnew 'foo:bar'
24 24 > hg qnew "`echo foo; echo bar`"
25 25 > hg qnew ' foo'
26 26 > hg qnew 'foo '
27 27 >
28 28 > hg qinit -c
29 29 >
30 30 > echo '% qnew with name containing slash'
31 31 > hg qnew foo/
32 32 > hg qnew foo/bar.patch
33 33 > hg qnew foo
34 34 > hg qseries
35 35 > hg qpop
36 36 > hg qdelete foo/bar.patch
37 37 >
38 38 > echo '% qnew with uncommitted changes'
39 39 > echo a > somefile
40 40 > hg add somefile
41 41 > hg qnew uncommitted.patch
42 42 > hg st
43 43 > hg qseries
44 44 >
45 45 > echo '% qnew implies add'
46 46 > hg -R .hg/patches st
47 47 >
48 48 > echo '% qnew missing'
49 49 > hg qnew missing.patch missing
50 50 >
51 51 > echo '% qnew -m'
52 52 > hg qnew -m 'foo bar' mtest.patch
53 53 > catpatch .hg/patches/mtest.patch
54 54 >
55 55 > echo '% qnew twice'
56 56 > hg qnew first.patch
57 57 > hg qnew first.patch
58 58 >
59 59 > touch ../first.patch
60 60 > hg qimport ../first.patch
61 61 >
62 62 > echo '% qnew -f from a subdirectory'
63 63 > hg qpop -a
64 64 > mkdir d
65 65 > cd d
66 66 > echo b > b
67 67 > hg ci -Am t
68 68 > echo b >> b
69 69 > hg st
70 70 > hg qnew -g -f p
71 71 > catpatch ../.hg/patches/p
72 72 >
73 73 > echo '% qnew -u with no username configured'
74 74 > HGUSER= hg qnew -u blue red
75 75 > catpatch ../.hg/patches/red
76 76 >
77 77 > echo '% qnew -e -u with no username configured'
78 78 > HGUSER= hg qnew -e -u chartreuse fucsia
79 79 > catpatch ../.hg/patches/fucsia
80 80 >
81 81 > echo '% fail when trying to import a merge'
82 82 > hg init merge
83 83 > cd merge
84 84 > touch a
85 85 > hg ci -Am null
86 86 > echo a >> a
87 87 > hg ci -m a
88 88 > hg up -r 0
89 89 > echo b >> a
90 90 > hg ci -m b
91 91 > hg merge -f 1
92 92 > hg resolve --mark a
93 93 > hg qnew -f merge
94 94 >
95 95 > cd ../../..
96 96 > rm -r mq
97 97 > }
98 98
99 99 plain headers
100 100
101 101 $ echo "[mq]" >> $HGRCPATH
102 102 $ echo "plain=true" >> $HGRCPATH
103 103 $ mkdir sandbox
104 104 $ (cd sandbox ; runtest)
105 105 adding a
106 106 % qnew should refuse bad patch names
107 107 abort: "series" cannot be used as the name of a patch
108 108 abort: "status" cannot be used as the name of a patch
109 109 abort: "guards" cannot be used as the name of a patch
110 110 abort: "." cannot be used as the name of a patch
111 111 abort: ".." cannot be used as the name of a patch
112 112 abort: patch name cannot begin with ".hg"
113 113 abort: patch name cannot begin with ".mq"
114 114 abort: '#' cannot be used in the name of a patch
115 115 abort: ':' cannot be used in the name of a patch
116 116 abort: '\n' cannot be used in the name of a patch
117 117 abort: patch name cannot begin or end with whitespace
118 118 abort: patch name cannot begin or end with whitespace
119 119 % qnew with name containing slash
120 120 abort: path ends in directory separator: foo/
121 121 abort: "foo" already exists as a directory
122 122 foo/bar.patch
123 123 popping foo/bar.patch
124 124 patch queue now empty
125 125 % qnew with uncommitted changes
126 126 uncommitted.patch
127 127 % qnew implies add
128 128 A .hgignore
129 129 A series
130 130 A uncommitted.patch
131 131 % qnew missing
132 132 abort: missing: * (glob)
133 133 % qnew -m
134 134 foo bar
135 135
136 136 % qnew twice
137 137 abort: patch "first.patch" already exists
138 138 abort: patch "first.patch" already exists
139 139 % qnew -f from a subdirectory
140 140 popping first.patch
141 141 popping mtest.patch
142 142 popping uncommitted.patch
143 143 patch queue now empty
144 144 adding d/b
145 145 M d/b
146 146 diff --git a/d/b b/d/b
147 147 --- a/d/b
148 148 +++ b/d/b
149 149 @@ -1,1 +1,2 @@
150 150 b
151 151 +b
152 152 % qnew -u with no username configured
153 153 From: blue
154 154
155 155 % qnew -e -u with no username configured
156 156 From: chartreuse
157 157
158 158 % fail when trying to import a merge
159 159 adding a
160 160 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
161 161 created new head
162 162 merging a
163 163 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
164 164 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
165 165 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
166 166 (no more unresolved files)
167 167 abort: cannot manage merge changesets
168 168 $ rm -r sandbox
169 169
170 170 hg headers
171 171
172 172 $ echo "plain=false" >> $HGRCPATH
173 173 $ mkdir sandbox
174 174 $ (cd sandbox ; runtest)
175 175 adding a
176 176 % qnew should refuse bad patch names
177 177 abort: "series" cannot be used as the name of a patch
178 178 abort: "status" cannot be used as the name of a patch
179 179 abort: "guards" cannot be used as the name of a patch
180 180 abort: "." cannot be used as the name of a patch
181 181 abort: ".." cannot be used as the name of a patch
182 182 abort: patch name cannot begin with ".hg"
183 183 abort: patch name cannot begin with ".mq"
184 184 abort: '#' cannot be used in the name of a patch
185 185 abort: ':' cannot be used in the name of a patch
186 186 abort: '\n' cannot be used in the name of a patch
187 187 abort: patch name cannot begin or end with whitespace
188 188 abort: patch name cannot begin or end with whitespace
189 189 % qnew with name containing slash
190 190 abort: path ends in directory separator: foo/
191 191 abort: "foo" already exists as a directory
192 192 foo/bar.patch
193 193 popping foo/bar.patch
194 194 patch queue now empty
195 195 % qnew with uncommitted changes
196 196 uncommitted.patch
197 197 % qnew implies add
198 198 A .hgignore
199 199 A series
200 200 A uncommitted.patch
201 201 % qnew missing
202 202 abort: missing: * (glob)
203 203 % qnew -m
204 204 # HG changeset patch
205 205 # Parent
206 206 foo bar
207 207
208 208 % qnew twice
209 209 abort: patch "first.patch" already exists
210 210 abort: patch "first.patch" already exists
211 211 % qnew -f from a subdirectory
212 212 popping first.patch
213 213 popping mtest.patch
214 214 popping uncommitted.patch
215 215 patch queue now empty
216 216 adding d/b
217 217 M d/b
218 218 # HG changeset patch
219 219 # Parent
220 220
221 221 diff --git a/d/b b/d/b
222 222 --- a/d/b
223 223 +++ b/d/b
224 224 @@ -1,1 +1,2 @@
225 225 b
226 226 +b
227 227 % qnew -u with no username configured
228 228 # HG changeset patch
229 229 # User blue
230 230 # Parent
231 231
232 232 % qnew -e -u with no username configured
233 233 # HG changeset patch
234 234 # User chartreuse
235 235 # Parent
236 236
237 237 % fail when trying to import a merge
238 238 adding a
239 239 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
240 240 created new head
241 241 merging a
242 242 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
243 243 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
244 244 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
245 245 (no more unresolved files)
246 246 abort: cannot manage merge changesets
247 247 $ rm -r sandbox
248 248
249 249 Test saving last-message.txt
250 250
251 251 $ hg init repo
252 252 $ cd repo
253 253
254 254 $ cat > $TESTTMP/commitfailure.py <<EOF
255 255 > from mercurial import error
256 256 > def reposetup(ui, repo):
257 257 > class commitfailure(repo.__class__):
258 258 > def commit(self, *args, **kwargs):
259 > raise error.Abort('emulating unexpected abort')
259 > raise error.Abort(b'emulating unexpected abort')
260 260 > repo.__class__ = commitfailure
261 261 > EOF
262 262 $ cat >> .hg/hgrc <<EOF
263 263 > [extensions]
264 264 > # this failure occurs before editor invocation
265 265 > commitfailure = $TESTTMP/commitfailure.py
266 266 > EOF
267 267
268 268 $ cat > $TESTTMP/editor.sh << EOF
269 269 > echo "==== before editing"
270 270 > cat \$1
271 271 > echo "===="
272 272 > echo "test saving last-message.txt" >> \$1
273 273 > EOF
274 274
275 275 (test that editor is not invoked before transaction starting)
276 276
277 277 $ rm -f .hg/last-message.txt
278 278 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e patch
279 279 abort: emulating unexpected abort
280 280 [255]
281 281 $ test -f .hg/last-message.txt
282 282 [1]
283 283
284 284 (test that editor is invoked and commit message is saved into
285 285 "last-message.txt")
286 286
287 287 $ cat >> .hg/hgrc <<EOF
288 288 > [extensions]
289 289 > commitfailure = !
290 290 > [hooks]
291 291 > # this failure occurs after editor invocation
292 292 > pretxncommit.unexpectedabort = false
293 293 > EOF
294 294
295 295 $ rm -f .hg/last-message.txt
296 296 $ hg status
297 297 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e patch
298 298 ==== before editing
299 299
300 300
301 301 HG: Enter commit message. Lines beginning with 'HG:' are removed.
302 302 HG: Leave message empty to use default message.
303 303 HG: --
304 304 HG: user: test
305 305 HG: branch 'default'
306 306 HG: no files changed
307 307 ====
308 308 transaction abort!
309 309 rollback completed
310 310 note: commit message saved in .hg/last-message.txt
311 311 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
312 312 abort: pretxncommit.unexpectedabort hook exited with status 1
313 313 [255]
314 314 $ cat .hg/last-message.txt
315 315
316 316
317 317 test saving last-message.txt
318 318
319 319 $ cat >> .hg/hgrc <<EOF
320 320 > [hooks]
321 321 > pretxncommit.unexpectedabort =
322 322 > EOF
@@ -1,293 +1,293 b''
1 1 #testcases vfs svfs
2 2 #testcases safe normal
3 3
4 4 #if safe
5 5 $ echo "[format]" >> $HGRCPATH
6 6 $ echo "exp-share-safe = True" >> $HGRCPATH
7 7 #endif
8 8
9 9 $ echo "[extensions]" >> $HGRCPATH
10 10 $ echo "share = " >> $HGRCPATH
11 11
12 12 #if svfs
13 13 $ echo "[format]" >> $HGRCPATH
14 14 $ echo "bookmarks-in-store = yes " >> $HGRCPATH
15 15 #endif
16 16
17 17 prepare repo1
18 18
19 19 $ hg init repo1
20 20 $ cd repo1
21 21 $ echo a > a
22 22 $ hg commit -A -m'init'
23 23 adding a
24 24 $ echo a >> a
25 25 $ hg commit -m'change in shared clone'
26 26 $ echo b > b
27 27 $ hg commit -A -m'another file'
28 28 adding b
29 29
30 30 share it
31 31
32 32 $ cd ..
33 33 $ hg share repo1 repo2
34 34 updating working directory
35 35 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
36 36
37 37 test sharing bookmarks
38 38
39 39 $ hg share -B repo1 repo3
40 40 updating working directory
41 41 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
42 42 $ cd repo1
43 43 $ hg bookmark bm1
44 44 $ hg bookmarks
45 45 * bm1 2:c2e0ac586386
46 46 $ cd ../repo2
47 47 $ hg book bm2
48 48 $ hg bookmarks
49 49 bm1 2:c2e0ac586386 (svfs !)
50 50 * bm2 2:c2e0ac586386
51 51 $ cd ../repo3
52 52 $ hg bookmarks
53 53 bm1 2:c2e0ac586386
54 54 bm2 2:c2e0ac586386 (svfs !)
55 55 $ hg book bm3
56 56 $ hg bookmarks
57 57 bm1 2:c2e0ac586386
58 58 bm2 2:c2e0ac586386 (svfs !)
59 59 * bm3 2:c2e0ac586386
60 60 $ cd ../repo1
61 61 $ hg bookmarks
62 62 * bm1 2:c2e0ac586386
63 63 bm2 2:c2e0ac586386 (svfs !)
64 64 bm3 2:c2e0ac586386
65 65
66 66 check whether HG_PENDING makes pending changes only in relatd
67 67 repositories visible to an external hook.
68 68
69 69 In "hg share" case, another transaction can't run in other
70 70 repositories sharing same source repository, because starting
71 71 transaction requires locking store of source repository.
72 72
73 73 Therefore, this test scenario ignores checking visibility of
74 74 .hg/bookmarks.pending in repo2, which shares repo1 without bookmarks.
75 75
76 76 $ cat > $TESTTMP/checkbookmarks.sh <<EOF
77 77 > echo "@repo1"
78 78 > hg -R "$TESTTMP/repo1" bookmarks
79 79 > echo "@repo2"
80 80 > hg -R "$TESTTMP/repo2" bookmarks
81 81 > echo "@repo3"
82 82 > hg -R "$TESTTMP/repo3" bookmarks
83 83 > exit 1 # to avoid adding new bookmark for subsequent tests
84 84 > EOF
85 85
86 86 $ cd ../repo1
87 87 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
88 88 @repo1
89 89 bm1 2:c2e0ac586386
90 90 bm2 2:c2e0ac586386 (svfs !)
91 91 bm3 2:c2e0ac586386
92 92 * bmX 2:c2e0ac586386
93 93 @repo2
94 94 bm1 2:c2e0ac586386 (svfs !)
95 95 * bm2 2:c2e0ac586386
96 96 bm3 2:c2e0ac586386 (svfs !)
97 97 @repo3
98 98 bm1 2:c2e0ac586386
99 99 bm2 2:c2e0ac586386 (svfs !)
100 100 * bm3 2:c2e0ac586386
101 101 bmX 2:c2e0ac586386 (vfs !)
102 102 transaction abort!
103 103 rollback completed
104 104 abort: pretxnclose hook exited with status 1
105 105 [255]
106 106 $ hg book bm1
107 107
108 108 FYI, in contrast to above test, bmX is invisible in repo1 (= shared
109 109 src), because (1) HG_PENDING refers only repo3 and (2)
110 110 "bookmarks.pending" is written only into repo3.
111 111
112 112 $ cd ../repo3
113 113 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
114 114 @repo1
115 115 * bm1 2:c2e0ac586386
116 116 bm2 2:c2e0ac586386 (svfs !)
117 117 bm3 2:c2e0ac586386
118 118 @repo2
119 119 bm1 2:c2e0ac586386 (svfs !)
120 120 * bm2 2:c2e0ac586386
121 121 bm3 2:c2e0ac586386 (svfs !)
122 122 @repo3
123 123 bm1 2:c2e0ac586386
124 124 bm2 2:c2e0ac586386 (svfs !)
125 125 bm3 2:c2e0ac586386
126 126 * bmX 2:c2e0ac586386
127 127 transaction abort!
128 128 rollback completed
129 129 abort: pretxnclose hook exited with status 1
130 130 [255]
131 131 $ hg book bm3
132 132
133 133 clean up bm2 since it's uninteresting (not shared in the vfs case and
134 134 same as bm3 in the svfs case)
135 135 $ cd ../repo2
136 136 $ hg book -d bm2
137 137
138 138 $ cd ../repo1
139 139
140 140 test that commits work
141 141
142 142 $ echo 'shared bookmarks' > a
143 143 $ hg commit -m 'testing shared bookmarks'
144 144 $ hg bookmarks
145 145 * bm1 3:b87954705719
146 146 bm3 2:c2e0ac586386
147 147 $ cd ../repo3
148 148 $ hg bookmarks
149 149 bm1 3:b87954705719
150 150 * bm3 2:c2e0ac586386
151 151 $ echo 'more shared bookmarks' > a
152 152 $ hg commit -m 'testing shared bookmarks'
153 153 created new head
154 154 $ hg bookmarks
155 155 bm1 3:b87954705719
156 156 * bm3 4:62f4ded848e4
157 157 $ cd ../repo1
158 158 $ hg bookmarks
159 159 * bm1 3:b87954705719
160 160 bm3 4:62f4ded848e4
161 161 $ cd ..
162 162
163 163 test pushing bookmarks works
164 164
165 165 $ hg clone repo3 repo4
166 166 updating to branch default
167 167 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
168 168 $ cd repo4
169 169 $ hg boo bm4
170 170 $ echo foo > b
171 171 $ hg commit -m 'foo in b'
172 172 $ hg boo
173 173 bm1 3:b87954705719
174 174 bm3 4:62f4ded848e4
175 175 * bm4 5:92793bfc8cad
176 176 $ hg push -B bm4
177 177 pushing to $TESTTMP/repo3
178 178 searching for changes
179 179 adding changesets
180 180 adding manifests
181 181 adding file changes
182 182 added 1 changesets with 1 changes to 1 files
183 183 exporting bookmark bm4
184 184 $ cd ../repo1
185 185 $ hg bookmarks
186 186 * bm1 3:b87954705719
187 187 bm3 4:62f4ded848e4
188 188 bm4 5:92793bfc8cad
189 189 $ cd ../repo3
190 190 $ hg bookmarks
191 191 bm1 3:b87954705719
192 192 * bm3 4:62f4ded848e4
193 193 bm4 5:92793bfc8cad
194 194 $ cd ..
195 195
196 196 test behavior when sharing a shared repo
197 197
198 198 $ hg share -B repo3 missingdir/repo5
199 199 updating working directory
200 200 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
201 201 $ cd missingdir/repo5
202 202 $ hg book
203 203 bm1 3:b87954705719
204 204 bm3 4:62f4ded848e4
205 205 bm4 5:92793bfc8cad
206 206 $ cd ../..
207 207
208 208 test what happens when an active bookmark is deleted
209 209
210 210 $ cd repo1
211 211 $ hg boo -d bm3
212 212 $ hg boo
213 213 * bm1 3:b87954705719
214 214 bm4 5:92793bfc8cad
215 215 $ cd ../repo3
216 216 $ hg boo
217 217 bm1 3:b87954705719
218 218 bm4 5:92793bfc8cad
219 219 $ cd ..
220 220
221 221 verify that bookmarks are not written on failed transaction
222 222
223 223 $ cat > failpullbookmarks.py << EOF
224 224 > """A small extension that makes bookmark pulls fail, for testing"""
225 225 > from __future__ import absolute_import
226 226 > from mercurial import (
227 227 > error,
228 228 > exchange,
229 229 > extensions,
230 230 > )
231 231 > def _pullbookmarks(orig, pullop):
232 232 > orig(pullop)
233 > raise error.HookAbort('forced failure by extension')
233 > raise error.HookAbort(b'forced failure by extension')
234 234 > def extsetup(ui):
235 235 > extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks)
236 236 > EOF
237 237 $ cd repo4
238 238 $ hg boo
239 239 bm1 3:b87954705719
240 240 bm3 4:62f4ded848e4
241 241 * bm4 5:92793bfc8cad
242 242 $ cd ../repo3
243 243 $ hg boo
244 244 bm1 3:b87954705719
245 245 bm4 5:92793bfc8cad
246 246 $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
247 247 pulling from $TESTTMP/repo4
248 248 searching for changes
249 249 no changes found
250 250 adding remote bookmark bm3
251 251 abort: forced failure by extension
252 252 [255]
253 253 $ hg boo
254 254 bm1 3:b87954705719
255 255 bm4 5:92793bfc8cad
256 256 $ hg pull $TESTTMP/repo4
257 257 pulling from $TESTTMP/repo4
258 258 searching for changes
259 259 no changes found
260 260 adding remote bookmark bm3
261 261 1 local changesets published
262 262 $ hg boo
263 263 bm1 3:b87954705719
264 264 * bm3 4:62f4ded848e4
265 265 bm4 5:92793bfc8cad
266 266 $ cd ..
267 267
268 268 verify bookmark behavior after unshare
269 269
270 270 $ cd repo3
271 271 $ hg unshare
272 272 $ hg boo
273 273 bm1 3:b87954705719
274 274 * bm3 4:62f4ded848e4
275 275 bm4 5:92793bfc8cad
276 276 $ hg boo -d bm4
277 277 $ hg boo bm5
278 278 $ hg boo
279 279 bm1 3:b87954705719
280 280 bm3 4:62f4ded848e4
281 281 * bm5 4:62f4ded848e4
282 282 $ cd ../repo1
283 283 $ hg boo
284 284 * bm1 3:b87954705719
285 285 bm3 4:62f4ded848e4
286 286 bm4 5:92793bfc8cad
287 287 $ cd ..
288 288
289 289 Test that if store is disabled, we drop the bookmarksinstore requirement
290 290
291 291 $ hg init brokenrepo --config format.bookmarks-in-store=True --config format.usestore=false
292 292 ignoring enabled 'format.bookmarks-in-store' config beacuse it is incompatible with disabled 'format.usestore' config
293 293 ignoring enabled 'format.exp-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !)
@@ -1,184 +1,184 b''
1 1 test sparse
2 2
3 3 $ hg init myrepo
4 4 $ cd myrepo
5 5 $ cat >> $HGRCPATH <<EOF
6 6 > [extensions]
7 7 > sparse=
8 8 > purge=
9 9 > strip=
10 10 > rebase=
11 11 > EOF
12 12
13 13 $ echo a > index.html
14 14 $ echo x > data.py
15 15 $ echo z > readme.txt
16 16 $ cat > base.sparse <<EOF
17 17 > [include]
18 18 > *.sparse
19 19 > EOF
20 20 $ hg ci -Aqm 'initial'
21 21 $ cat > webpage.sparse <<EOF
22 22 > %include base.sparse
23 23 > [include]
24 24 > *.html
25 25 > EOF
26 26 $ hg ci -Aqm 'initial'
27 27
28 28 Import a rules file against a 'blank' sparse profile
29 29
30 30 $ cat > $TESTTMP/rules_to_import <<EOF
31 31 > [include]
32 32 > *.py
33 33 > EOF
34 34 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
35 35 $ ls -A
36 36 .hg
37 37 data.py
38 38
39 39 $ hg debugsparse --reset
40 40 $ rm .hg/sparse
41 41
42 42 $ cat > $TESTTMP/rules_to_import <<EOF
43 43 > %include base.sparse
44 44 > [include]
45 45 > *.py
46 46 > EOF
47 47 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
48 48 $ ls -A
49 49 .hg
50 50 base.sparse
51 51 data.py
52 52 webpage.sparse
53 53
54 54 $ hg debugsparse --reset
55 55 $ rm .hg/sparse
56 56
57 57 Start against an existing profile; rules *already active* should be ignored
58 58
59 59 $ hg debugsparse --enable-profile webpage.sparse
60 60 $ hg debugsparse --include *.py
61 61 $ cat > $TESTTMP/rules_to_import <<EOF
62 62 > %include base.sparse
63 63 > [include]
64 64 > *.html
65 65 > *.txt
66 66 > [exclude]
67 67 > *.py
68 68 > EOF
69 69 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
70 70 $ ls -A
71 71 .hg
72 72 base.sparse
73 73 index.html
74 74 readme.txt
75 75 webpage.sparse
76 76 $ cat .hg/sparse
77 77 %include webpage.sparse
78 78 [include]
79 79 *.py
80 80 *.txt
81 81 [exclude]
82 82 *.py
83 83
84 84 $ hg debugsparse --reset
85 85 $ rm .hg/sparse
86 86
87 87 Same tests, with -Tjson enabled to output summaries
88 88
89 89 $ cat > $TESTTMP/rules_to_import <<EOF
90 90 > [include]
91 91 > *.py
92 92 > EOF
93 93 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
94 94 [
95 95 {
96 96 "exclude_rules_added": 0,
97 97 "files_added": 0,
98 98 "files_conflicting": 0,
99 99 "files_dropped": 4,
100 100 "include_rules_added": 1,
101 101 "profiles_added": 0
102 102 }
103 103 ]
104 104
105 105 $ hg debugsparse --reset
106 106 $ rm .hg/sparse
107 107
108 108 $ cat > $TESTTMP/rules_to_import <<EOF
109 109 > %include base.sparse
110 110 > [include]
111 111 > *.py
112 112 > EOF
113 113 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
114 114 [
115 115 {
116 116 "exclude_rules_added": 0,
117 117 "files_added": 0,
118 118 "files_conflicting": 0,
119 119 "files_dropped": 2,
120 120 "include_rules_added": 1,
121 121 "profiles_added": 1
122 122 }
123 123 ]
124 124
125 125 $ hg debugsparse --reset
126 126 $ rm .hg/sparse
127 127
128 128 $ hg debugsparse --enable-profile webpage.sparse
129 129 $ hg debugsparse --include *.py
130 130 $ cat > $TESTTMP/rules_to_import <<EOF
131 131 > %include base.sparse
132 132 > [include]
133 133 > *.html
134 134 > *.txt
135 135 > [exclude]
136 136 > *.py
137 137 > EOF
138 138 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
139 139 [
140 140 {
141 141 "exclude_rules_added": 1,
142 142 "files_added": 1,
143 143 "files_conflicting": 0,
144 144 "files_dropped": 1,
145 145 "include_rules_added": 1,
146 146 "profiles_added": 0
147 147 }
148 148 ]
149 149
150 150 If importing results in no new rules being added, no refresh should take place!
151 151
152 152 $ cat > $TESTTMP/trap_sparse_refresh.py <<EOF
153 153 > from mercurial import error, sparse
154 154 > def extsetup(ui):
155 155 > def abort_refresh(*args, **kwargs):
156 > raise error.Abort('sparse._refresh called!')
156 > raise error.Abort(b'sparse._refresh called!')
157 157 > sparse.refreshwdir = abort_refresh
158 158 > EOF
159 159 $ cat >> $HGRCPATH <<EOF
160 160 > [extensions]
161 161 > trap_sparse_refresh=$TESTTMP/trap_sparse_refresh.py
162 162 > EOF
163 163 $ cat > $TESTTMP/rules_to_import <<EOF
164 164 > [include]
165 165 > *.py
166 166 > EOF
167 167 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
168 168
169 169 If an exception is raised during refresh, restore the existing rules again.
170 170
171 171 $ cat > $TESTTMP/rules_to_import <<EOF
172 172 > [exclude]
173 173 > *.html
174 174 > EOF
175 175 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
176 176 abort: sparse._refresh called!
177 177 [255]
178 178 $ cat .hg/sparse
179 179 %include webpage.sparse
180 180 [include]
181 181 *.py
182 182 *.txt
183 183 [exclude]
184 184 *.py
General Comments 0
You need to be logged in to leave comments. Login now