##// END OF EJS Templates
localrepo: provide indeterminate progress information while bundling
Augie Fackler -
r10432:8a8030fc default
parent child Browse files
Show More
@@ -1,2189 +1,2216
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import lock, transaction, store, encoding
13 13 import util, extensions, hook, error
14 14 import match as match_
15 15 import merge as merge_
16 16 import tags as tags_
17 17 from lock import release
18 18 import weakref, stat, errno, os, time, inspect
19 19 propertycache = util.propertycache
20 20
class localrepository(repo.repository):
    # Wire-protocol capabilities this repository advertises to peers.
    capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
    # On-disk format requirements this code knows how to read/write;
    # repos listing anything else in .hg/requires are rejected in __init__.
    supported = set('revlogv1 store fncache shared'.split())
    def __init__(self, baseui, path=None, create=0):
        """Open the repository at path, initializing it first if create is set.

        Raises error.RepoError when the repository is missing (create=0),
        already exists (create=1), or lists an unsupported requirement.
        """
        repo.repository.__init__(self)
        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)
        self.baseui = baseui
        self.ui = baseui.copy()

        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # no per-repo hgrc; continue with the inherited configuration
            pass

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if self.ui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                    # fncache only makes sense on top of the store layout
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            requirements = set()
            try:
                requirements = set(self.opener("requires").read().splitlines())
            except IOError, inst:
                # a missing requires file means an old, requirement-free repo
                if inst.errno != errno.ENOENT:
                    raise
            for r in requirements - self.supported:
                raise error.RepoError(_("requirement '%s' not supported") % r)

        # honor .hg/sharedpath if present (shared store support)
        self.sharedpath = self.path
        try:
            s = os.path.realpath(self.opener("sharedpath").read())
            if not os.path.exists(s):
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, util.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode
        self.sopener.options = {}

        # These two define the set of tags for this repository. _tags
        # maps tag name to node; _tagtypes maps tag name to 'global' or
        # 'local'. (Global tags are defined by .hgtags across all
        # heads, and local tags are defined in .hg/localtags.) They
        # constitute the in-memory cache of tags.
        self._tags = None
        self._tagtypes = None

        self._branchcache = None # in UTF-8
        self._branchcachetip = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None
108 108
    @propertycache
    def changelog(self):
        """The changelog revlog, created lazily on first access."""
        c = changelog.changelog(self.sopener)
        if 'HG_PENDING' in os.environ:
            # a pretxn hook is running: expose not-yet-committed revisions
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        self.sopener.options['defversion'] = c.version
        return c
118 118
119 119 @propertycache
120 120 def manifest(self):
121 121 return manifest.manifest(self.sopener)
122 122
123 123 @propertycache
124 124 def dirstate(self):
125 125 return dirstate.dirstate(self.opener, self.ui, self.root)
126 126
127 127 def __getitem__(self, changeid):
128 128 if changeid is None:
129 129 return context.workingctx(self)
130 130 return context.changectx(self, changeid)
131 131
132 132 def __contains__(self, changeid):
133 133 try:
134 134 return bool(self.lookup(changeid))
135 135 except error.RepoLookupError:
136 136 return False
137 137
    def __nonzero__(self):
        # a repository object is always truthy, even when it has no revisions
        return True
140 140
141 141 def __len__(self):
142 142 return len(self.changelog)
143 143
144 144 def __iter__(self):
145 145 for i in xrange(len(self)):
146 146 yield i
147 147
148 148 def url(self):
149 149 return 'file:' + self.root
150 150
    def hook(self, name, throw=False, **args):
        """Run the named hook; throw=True converts hook failure into an
        exception instead of a return code."""
        return hook.hook(self.ui, self, name, throw, **args)
153 153
    # characters that may never appear in a tag name
    tag_disallowed = ':\r\n'

    def _tag(self, names, node, message, local, user, date, extra={}):
        """Low-level tagging helper: validate names, fire pretag/tag hooks,
        and write the tag entries (to .hg/localtags when local, otherwise
        to .hgtags followed by a commit).

        NOTE(review): the mutable default 'extra' is shared across calls;
        safe only as long as no caller mutates it.
        """
        if isinstance(names, str):
            allchars = names
            names = (names,)
        else:
            allchars = ''.join(names)
        for c in self.tag_disallowed:
            if c in allchars:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)

        def writetags(fp, names, munge, prevtags):
            # append tag lines; when overriding an existing tag, write the
            # old node first so history of the tag is preserved in the file
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if self._tagtypes and name in self._tagtypes:
                    old = self._tags.get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError:
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        if '.hgtags' not in self.dirstate:
            self.add(['.hgtags'])

        # commit only the .hgtags change
        m = match_.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
217 217
    def tag(self, names, node, message, local, user, date):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # refuse if .hgtags appears in any of the first five status lists
        # (dirty .hgtags would be silently folded into the tag commit)
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date)
246 246
247 247 def tags(self):
248 248 '''return a mapping of tag to node'''
249 249 if self._tags is None:
250 250 (self._tags, self._tagtypes) = self._findtags()
251 251
252 252 return self._tags
253 253
    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        alltags = {} # map tag name to (node, hist)
        tagtypes = {}

        tags_.findglobaltags(self.ui, self, alltags, tagtypes)
        tags_.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            # a nullid node marks a deleted tag; drop it from the result
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)
286 286
287 287 def tagtype(self, tagname):
288 288 '''
289 289 return the type of the given tag. result can be:
290 290
291 291 'local' : a local tag
292 292 'global' : a global tag
293 293 None : tag does not exist
294 294 '''
295 295
296 296 self.tags()
297 297
298 298 return self._tagtypes.get(tagname)
299 299
300 300 def tagslist(self):
301 301 '''return a list of tags ordered by revision'''
302 302 l = []
303 303 for t, n in self.tags().iteritems():
304 304 try:
305 305 r = self.changelog.rev(n)
306 306 except:
307 307 r = -2 # sort to the beginning of the list if unknown
308 308 l.append((r, t, n))
309 309 return [(t, n) for r, t, n in sorted(l)]
310 310
311 311 def nodetags(self, node):
312 312 '''return the tags associated with a node'''
313 313 if not self.nodetagscache:
314 314 self.nodetagscache = {}
315 315 for t, n in self.tags().iteritems():
316 316 self.nodetagscache.setdefault(n, []).append(t)
317 317 return self.nodetagscache.get(node, [])
318 318
    def _branchtags(self, partial, lrev):
        # TODO: rename this function?
        # Bring the branch-head cache 'partial' (valid through rev lrev)
        # up to the current tip, persisting it when it was stale.
        tiprev = len(self) - 1
        if lrev != tiprev:
            self._updatebranchcache(partial, lrev + 1, tiprev + 1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
327 327
    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]}'''
        tip = self.changelog.tip()
        if self._branchcache is not None and self._branchcachetip == tip:
            # in-memory cache is current; reuse it
            return self._branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # no usable in-memory state (e.g. after a strip): reload from disk
            partial, last, lrev = self._readbranchcache()
        else:
            # in-memory cache is merely stale; update incrementally
            lrev = self.changelog.rev(oldtip)
            partial = self._branchcache

        self._branchtags(partial, lrev)
        # this private cache holds all heads (not just tips)
        self._branchcache = partial

        return self._branchcache
347 347
    def branchtags(self):
        '''return a dict where branch names map to the tipmost head of
        the branch, open heads come before closed'''
        bt = {}
        for bn, heads in self.branchmap().iteritems():
            tip = heads[-1]
            # prefer the newest head whose changeset is not marked closed
            for h in reversed(heads):
                if 'close' not in self.changelog.read(h)[5]:
                    tip = h
                    break
            bt[bn] = tip
        return bt
360 360
361 361
    def _readbranchcache(self):
        """Parse .hg/branchheads.cache.

        Returns (partial, last, lrev): partial maps branch name to a list
        of head nodes, last/lrev identify the tip the cache was written
        against.  Any parse problem simply invalidates the cache.
        """
        partial = {}
        try:
            f = self.opener("branchheads.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            # no cache file (or unreadable): start from scratch
            return {}, nullid, nullrev

        try:
            # first line: "<tip hex> <tip rev>"
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            # remaining lines: "<node hex> <branch name>"
            for l in lines:
                if not l:
                    continue
                node, label = l.split(" ", 1)
                partial.setdefault(label.strip(), []).append(bin(node))
        except KeyboardInterrupt:
            raise
        except Exception, inst:
            # any corruption just discards the cache; report when debugging
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
389 389
    def _writebranchcache(self, branches, tip, tiprev):
        """Persist the branch-head cache to .hg/branchheads.cache.

        Best-effort: write failures are ignored (the cache is recomputable).
        """
        try:
            f = self.opener("branchheads.cache", "w", atomictemp=True)
            # header line, then one "<node> <branch>" line per head
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, nodes in branches.iteritems():
                for node in nodes:
                    f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass
400 400
    def _updatebranchcache(self, partial, start, end):
        """Fold revisions [start, end) into the branch-head cache 'partial',
        pruning entries that are no longer heads."""
        # collect new branch entries
        newbranches = {}
        for r in xrange(start, end):
            c = self[r]
            newbranches.setdefault(c.branch(), []).append(c.node())
        # if older branchheads are reachable from new ones, they aren't
        # really branchheads. Note checking parents is insufficient:
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newnodes in newbranches.iteritems():
            bheads = partial.setdefault(branch, [])
            bheads.extend(newnodes)
            if len(bheads) < 2:
                continue
            newbheads = []
            # starting from tip means fewer passes over reachable
            while newnodes:
                latest = newnodes.pop()
                if latest not in bheads:
                    continue
                # only nodes unreachable from 'latest' can remain heads;
                # bound the reachability walk by the minimum head rev
                minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
                reachable = self.changelog.reachable(latest, minbhrev)
                bheads = [b for b in bheads if b not in reachable]
                newbheads.insert(0, latest)
            bheads.extend(newbheads)
            partial[branch] = bheads
427 427
428 428 def lookup(self, key):
429 429 if isinstance(key, int):
430 430 return self.changelog.node(key)
431 431 elif key == '.':
432 432 return self.dirstate.parents()[0]
433 433 elif key == 'null':
434 434 return nullid
435 435 elif key == 'tip':
436 436 return self.changelog.tip()
437 437 n = self.changelog._match(key)
438 438 if n:
439 439 return n
440 440 if key in self.tags():
441 441 return self.tags()[key]
442 442 if key in self.branchtags():
443 443 return self.branchtags()[key]
444 444 n = self.changelog._partialmatch(key)
445 445 if n:
446 446 return n
447 447
448 448 # can't find key, check if it might have come from damaged dirstate
449 449 if key in self.dirstate.parents():
450 450 raise error.Abort(_("working directory has unknown parent '%s'!")
451 451 % short(key))
452 452 try:
453 453 if len(key) == 20:
454 454 key = hex(key)
455 455 except:
456 456 pass
457 457 raise error.RepoLookupError(_("unknown revision '%s'") % key)
458 458
    def local(self):
        # True: this is a directly-accessible local repository
        # (remote repository proxies return False here)
        return True
461 461
    def join(self, f):
        """Join f onto the .hg directory path."""
        return os.path.join(self.path, f)
464 464
    def wjoin(self, f):
        """Join f onto the working directory root."""
        return os.path.join(self.root, f)
467 467
    def rjoin(self, f):
        """Join a repo-style ('/'-separated) path onto the working root,
        converting it to the platform's separators first."""
        return os.path.join(self.root, util.pconvert(f))
470 470
471 471 def file(self, f):
472 472 if f[0] == '/':
473 473 f = f[1:]
474 474 return filelog.filelog(self.sopener, f)
475 475
    def changectx(self, changeid):
        """Return the changectx for changeid (alias for repo[changeid])."""
        return self[changeid]
478 478
    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        # changeid=None yields the working directory's parents
        return self[changeid].parents()
482 482
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
487 487
    def getcwd(self):
        """Return the current working directory as seen by the dirstate."""
        return self.dirstate.getcwd()
490 490
    def pathto(self, f, cwd=None):
        """Return f rendered relative to cwd (delegates to the dirstate)."""
        return self.dirstate.pathto(f, cwd)
493 493
    def wfile(self, f, mode='r'):
        """Open file f in the working directory with the given mode."""
        return self.wopener(f, mode)
496 496
    def _link(self, f):
        """True if f is a symlink in the working directory."""
        return os.path.islink(self.wjoin(f))
499 499
500 500 def _filter(self, filter, filename, data):
501 501 if filter not in self.filterpats:
502 502 l = []
503 503 for pat, cmd in self.ui.configitems(filter):
504 504 if cmd == '!':
505 505 continue
506 506 mf = match_.match(self.root, '', [pat])
507 507 fn = None
508 508 params = cmd
509 509 for name, filterfn in self._datafilters.iteritems():
510 510 if cmd.startswith(name):
511 511 fn = filterfn
512 512 params = cmd[len(name):].lstrip()
513 513 break
514 514 if not fn:
515 515 fn = lambda s, c, **kwargs: util.filter(s, c)
516 516 # Wrap old filters not supporting keyword arguments
517 517 if not inspect.getargspec(fn)[2]:
518 518 oldfn = fn
519 519 fn = lambda s, c, **kwargs: oldfn(s, c)
520 520 l.append((mf, fn, params))
521 521 self.filterpats[filter] = l
522 522
523 523 for mf, fn, cmd in self.filterpats[filter]:
524 524 if mf(filename):
525 525 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
526 526 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
527 527 break
528 528
529 529 return data
530 530
    def adddatafilter(self, name, filter):
        """Register an in-process data filter callable under name, for use
        by encode/decode filter configuration."""
        self._datafilters[name] = filter
533 533
534 534 def wread(self, filename):
535 535 if self._link(filename):
536 536 data = os.readlink(self.wjoin(filename))
537 537 else:
538 538 data = self.wopener(filename, 'r').read()
539 539 return self._filter("encode", filename, data)
540 540
    def wwrite(self, filename, data, flags):
        """Write data to filename in the working directory, applying the
        configured decode filters and honoring flags ('l' symlink,
        'x' executable)."""
        data = self._filter("decode", filename, data)
        # remove any existing file first so symlink/regular transitions work
        try:
            os.unlink(self.wjoin(filename))
        except OSError:
            pass
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            self.wopener(filename, 'w').write(data)
            if 'x' in flags:
                util.set_flags(self.wjoin(filename), False, True)
553 553
    def wwritedata(self, filename, data):
        """Return data as it would be written to the working directory
        (decode filters applied), without touching any file."""
        return self._filter("decode", filename, data)
556 556
    def transaction(self):
        """Open (or nest into) a store transaction and return it.

        Raises error.RepoError when an abandoned journal is present.
        Only a weak reference to the transaction is retained.
        """
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            # nest inside the transaction already in progress
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(
                _("abandoned transaction found - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        # on close, journal files become the undo files for rollback
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        self._transref = weakref.ref(tr)
        return tr
584 584
    def recover(self):
        """Roll back an interrupted transaction, if any.

        Returns True when a journal was found and rolled back, else False.
        """
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"),
                                     self.ui.warn)
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
599 599
    def rollback(self):
        """Undo the last committed transaction, restoring the saved
        dirstate and branch, and invalidating in-memory caches."""
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"),
                                     self.ui.warn)
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    # undo.branch missing: keep the current branch, but warn
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % encoding.tolocal(self.dirstate.branch()))
                self.invalidate()
                self.dirstate.invalidate()
                self.destroyed()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            release(lock, wlock)
624 624
625 625 def invalidate(self):
626 626 for a in "changelog manifest".split():
627 627 if a in self.__dict__:
628 628 delattr(self, a)
629 629 self._tags = None
630 630 self._tagtypes = None
631 631 self.nodetagscache = None
632 632 self._branchcache = None # in UTF-8
633 633 self._branchcachetip = None
634 634
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock at lockname.

        With wait, retry for up to ui.timeout (default 600s) when the
        lock is held; without it, re-raise LockHeld immediately.
        acquirefn, if given, runs after acquisition.
        """
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
649 649
    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.)'''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            # already locked by us: bump the recursion count and reuse
            l.lock()
            return l

        # invalidate caches on acquisition so we see other writers' changes
        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                       _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
663 663
    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            # already locked by us: bump the recursion count and reuse
            l.lock()
            return l

        # write the dirstate on release, re-read it on acquisition
        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
678 678
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the file node for the new (or reused) filelog entry and
        appends fname to changelist when a new revision was created.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3 rev1 changes file foo
            # \ / rev2 renames foo to bar and changes it
            # \- 2 -/ rev3 should have bar with all changes and
            # should record that bar descends from
            # bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3 rev4 reverts the content change from rev2
            # \ / merging rev3 and rev4 should use bar@rev2
            # \- 2 --- 4 as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(" %s: searching for copy revision for %s\n" %
                              (fname, cfname))
                for ancestor in self['.'].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestor = flog.ancestor(fparent1, fparent2)
            if fparentancestor == fparent1:
                fparent1, fparent2 = fparent2, nullid
            elif fparentancestor == fparent2:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        # content unchanged: reuse the existing file node
        return fparent1
753 753
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the new changeset node, or None when there was nothing
        to commit.

        NOTE(review): the mutable default 'extra' is shared across calls;
        safe only as long as no caller mutates it.
        """

        def fail(f, msg):
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = match_.always(self.root, '')

        if not force:
            # record visited directories so explicit dir patterns can be
            # validated below
            vdirs = []
            match.dir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            p1, p2 = self.dirstate.parents()
            wctx = self[None]

            if (not force and p2 != nullid and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            changes = self.status(match=match, clean=force)
            if force:
                changes[0].extend(changes[6]) # mq may commit unchanged files

            # check subrepos
            subs = []
            for s in wctx.substate:
                if match(s) and wctx.sub(s).dirty():
                    subs.append(s)
            if subs and '.hgsubstate' not in changes[0]:
                changes[0].insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(changes[0] + changes[1] + changes[2])

                for f in match.files():
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in changes[3]: # missing
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            # nothing changed (and not closing a branch head): no commit
            if (not force and not extra.get("close") and p2 == nullid
                and not (changes[0] or changes[1] or changes[2])
                and self[None].branch() == self['.'].branch()):
                return None

            ms = merge_.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg resolve)"))

            cctx = context.workingctx(self, (p1, p2), text, user, date,
                                      extra, changes)
            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # commit subs
            if subs:
                state = wctx.substate.copy()
                for s in subs:
                    self.ui.status(_('committing subrepository %s\n') % s)
                    sr = wctx.sub(s).commit(cctx._text, user, date)
                    state[s] = (state[s][0], sr)
                subrepo.writestate(self, state)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook). Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfile = self.opener('last-message.txt', 'wb')
            msgfile.write(cctx._text)
            msgfile.close()

            try:
                ret = self.commitctx(cctx, True)
            except:
                # tell the user where an edited message was saved, then
                # re-raise so the failure propagates
                if edited:
                    msgfn = self.pathto(msgfile.name[len(self.root)+1:])
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update dirstate and mergestate
            for f in changes[0] + changes[1]:
                self.dirstate.normal(f)
            for f in changes[2]:
                self.dirstate.forget(f)
            self.dirstate.setparents(ret)
            ms.reset()

            return ret

        finally:
            wlock.release()
869 869
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.

        Revision information is passed via the context argument.
        Returns the new changeset node.  With error=True, IOErrors while
        committing individual files are fatal instead of demoting the
        file to 'removed'.
        """

        tr = lock = None
        removed = ctx.removed()
        p1, p2 = ctx.p1(), ctx.p2()
        m1 = p1.manifest().copy()
        m2 = p2.manifest()
        user = ctx.user()

        xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        lock = self.lock()
        try:
            tr = self.transaction()
            # a weak proxy avoids keeping the transaction alive via the
            # filelogs it is handed to
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            changed = []
            linkrev = len(self)
            for f in sorted(ctx.modified() + ctx.added()):
                self.ui.note(f + "\n")
                try:
                    fctx = ctx[f]
                    new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                              changed)
                    m1.set(f, fctx.flags())
                except OSError, inst:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                except IOError, inst:
                    errcode = getattr(inst, 'errno', errno.ENOENT)
                    if error or errcode and errcode != errno.ENOENT:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # missing file: record it as removed instead
                        removed.append(f)

            # update manifest
            m1.update(new)
            removed = [f for f in sorted(removed) if f in m1 or f in m2]
            drop = [f for f in removed if f in m1]
            for f in drop:
                del m1[f]
            mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                   p2.manifestnode(), (new, drop))

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, changed + removed, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            # let pretxncommit hooks see the pending changelog data
            p = lambda: self.changelog.writepending() and self.root or ""
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            tr.close()

            if self._branchcache:
                self.branchtags()

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            del tr
            lock.release()
941 941
942 942 def destroyed(self):
943 943 '''Inform the repository that nodes have been destroyed.
944 944 Intended for use by strip and rollback, so there's a common
945 945 place for anything that has to be done after destroying history.'''
946 946 # XXX it might be nice if we could take the list of destroyed
947 947 # nodes, but I don't see an easy way for rollback() to do that
948 948
949 949 # Ensure the persistent tag cache is updated. Doing it now
950 950 # means that the tag cache only has to worry about destroyed
951 951 # heads immediately after a strip/rollback. That in turn
952 952 # guarantees that "cachetip == currenttip" (comparing both rev
953 953 # and node) always means no nodes have been added or destroyed.
954 954
955 955 # XXX this is suboptimal when qrefresh'ing: we strip the current
956 956 # head, refresh the tag cache, then immediately add a new head.
957 957 # But I think doing it this way is necessary for the "instant
958 958 # tag cache retrieval" case to work.
959 959 tags_.findglobaltags(self.ui, self, {}, {})
960 960
961 961 def walk(self, match, node=None):
962 962 '''
963 963 walk recursively through the directory tree or a given
964 964 changeset, finding all files matched by the match
965 965 function
966 966 '''
967 967 return self[node].walk(match)
968 968
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a tuple of seven sorted filename lists:
        (modified, added, removed, deleted, unknown, ignored, clean).
        The unknown/ignored/clean lists are only populated when the
        corresponding flag argument is True.
        """

        def mfmatches(ctx):
            # a copy of ctx's manifest restricted to files selected by match
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # node1/node2 may already be contexts; avoid a second lookup
        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2.rev() is None
        # fast path: comparing the working dir against its own parent
        parentworking = working and ctx1 == self['.']
        match = match or match_.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            # a file unknown to the dirstate may still exist in ctx1;
            # only warn about files truly unknown to both
            def bad(f, msg):
                if f not in ctx1:
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
            match.bad = bad

        if working: # we need to scan the working dir
            subrepos = ctx1.substate.keys()
            s = self.dirstate.status(match, subrepos, listignored,
                                     listclean, listunknown)
            # 'cmp' holds files whose cleanliness needs a content compare
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in sorted(cmp):
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f].data())):
                        modified.append(f)
                    else:
                        fixup.append(f)

                if listclean:
                    clean += fixup

                # update dirstate for files that are actually clean
                if fixup:
                    try:
                        # updating the dirstate is optional
                        # so we don't wait on the lock
                        wlock = self.wlock(False)
                        try:
                            for f in fixup:
                                self.dirstate.normal(f)
                        finally:
                            wlock.release()
                    except error.LockError:
                        pass

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            # recompute status by walking the two manifests; a nullid
            # entry in mf2 (working dir pseudo-manifest) forces a
            # content compare
            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)
            # anything left in mf1 was not seen in mf2, hence removed
            removed = mf1.keys()

        r = modified, added, removed, deleted, unknown, ignored, clean
        [l.sort() for l in r]
        return r
1076 1076
1077 1077 def add(self, list):
1078 1078 wlock = self.wlock()
1079 1079 try:
1080 1080 rejected = []
1081 1081 for f in list:
1082 1082 p = self.wjoin(f)
1083 1083 try:
1084 1084 st = os.lstat(p)
1085 1085 except:
1086 1086 self.ui.warn(_("%s does not exist!\n") % f)
1087 1087 rejected.append(f)
1088 1088 continue
1089 1089 if st.st_size > 10000000:
1090 1090 self.ui.warn(_("%s: files over 10MB may cause memory and"
1091 1091 " performance problems\n"
1092 1092 "(use 'hg revert %s' to unadd the file)\n")
1093 1093 % (f, f))
1094 1094 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1095 1095 self.ui.warn(_("%s not added: only files and symlinks "
1096 1096 "supported currently\n") % f)
1097 1097 rejected.append(p)
1098 1098 elif self.dirstate[f] in 'amn':
1099 1099 self.ui.warn(_("%s already tracked!\n") % f)
1100 1100 elif self.dirstate[f] == 'r':
1101 1101 self.dirstate.normallookup(f)
1102 1102 else:
1103 1103 self.dirstate.add(f)
1104 1104 return rejected
1105 1105 finally:
1106 1106 wlock.release()
1107 1107
1108 1108 def forget(self, list):
1109 1109 wlock = self.wlock()
1110 1110 try:
1111 1111 for f in list:
1112 1112 if self.dirstate[f] != 'a':
1113 1113 self.ui.warn(_("%s not added!\n") % f)
1114 1114 else:
1115 1115 self.dirstate.forget(f)
1116 1116 finally:
1117 1117 wlock.release()
1118 1118
1119 1119 def remove(self, list, unlink=False):
1120 1120 if unlink:
1121 1121 for f in list:
1122 1122 try:
1123 1123 util.unlink(self.wjoin(f))
1124 1124 except OSError, inst:
1125 1125 if inst.errno != errno.ENOENT:
1126 1126 raise
1127 1127 wlock = self.wlock()
1128 1128 try:
1129 1129 for f in list:
1130 1130 if unlink and os.path.exists(self.wjoin(f)):
1131 1131 self.ui.warn(_("%s still exists!\n") % f)
1132 1132 elif self.dirstate[f] == 'a':
1133 1133 self.dirstate.forget(f)
1134 1134 elif f not in self.dirstate:
1135 1135 self.ui.warn(_("%s not tracked!\n") % f)
1136 1136 else:
1137 1137 self.dirstate.remove(f)
1138 1138 finally:
1139 1139 wlock.release()
1140 1140
1141 1141 def undelete(self, list):
1142 1142 manifests = [self.manifest.read(self.changelog.read(p)[0])
1143 1143 for p in self.dirstate.parents() if p != nullid]
1144 1144 wlock = self.wlock()
1145 1145 try:
1146 1146 for f in list:
1147 1147 if self.dirstate[f] != 'r':
1148 1148 self.ui.warn(_("%s not removed!\n") % f)
1149 1149 else:
1150 1150 m = f in manifests[0] and manifests[0] or manifests[1]
1151 1151 t = self.file(f).read(m[f])
1152 1152 self.wwrite(f, t, m.flags(f))
1153 1153 self.dirstate.normal(f)
1154 1154 finally:
1155 1155 wlock.release()
1156 1156
1157 1157 def copy(self, source, dest):
1158 1158 p = self.wjoin(dest)
1159 1159 if not (os.path.exists(p) or os.path.islink(p)):
1160 1160 self.ui.warn(_("%s does not exist!\n") % dest)
1161 1161 elif not (os.path.isfile(p) or os.path.islink(p)):
1162 1162 self.ui.warn(_("copy failed: %s is not a file or a "
1163 1163 "symbolic link\n") % dest)
1164 1164 else:
1165 1165 wlock = self.wlock()
1166 1166 try:
1167 1167 if self.dirstate[dest] in '?r':
1168 1168 self.dirstate.add(dest)
1169 1169 self.dirstate.copy(source, dest)
1170 1170 finally:
1171 1171 wlock.release()
1172 1172
1173 1173 def heads(self, start=None):
1174 1174 heads = self.changelog.heads(start)
1175 1175 # sort the output in rev descending order
1176 1176 heads = [(-self.changelog.rev(h), h) for h in heads]
1177 1177 return [n for (r, n) in sorted(heads)]
1178 1178
1179 1179 def branchheads(self, branch=None, start=None, closed=False):
1180 1180 '''return a (possibly filtered) list of heads for the given branch
1181 1181
1182 1182 Heads are returned in topological order, from newest to oldest.
1183 1183 If branch is None, use the dirstate branch.
1184 1184 If start is not None, return only heads reachable from start.
1185 1185 If closed is True, return heads that are marked as closed as well.
1186 1186 '''
1187 1187 if branch is None:
1188 1188 branch = self[None].branch()
1189 1189 branches = self.branchmap()
1190 1190 if branch not in branches:
1191 1191 return []
1192 1192 # the cache returns heads ordered lowest to highest
1193 1193 bheads = list(reversed(branches[branch]))
1194 1194 if start is not None:
1195 1195 # filter out the heads that cannot be reached from startrev
1196 1196 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1197 1197 bheads = [h for h in bheads if h in fbheads]
1198 1198 if not closed:
1199 1199 bheads = [h for h in bheads if
1200 1200 ('close' not in self.changelog.read(h)[5])]
1201 1201 return bheads
1202 1202
1203 1203 def branches(self, nodes):
1204 1204 if not nodes:
1205 1205 nodes = [self.changelog.tip()]
1206 1206 b = []
1207 1207 for n in nodes:
1208 1208 t = n
1209 1209 while 1:
1210 1210 p = self.changelog.parents(n)
1211 1211 if p[1] != nullid or p[0] == nullid:
1212 1212 b.append((t, n, p[0], p[1]))
1213 1213 break
1214 1214 n = p[0]
1215 1215 return b
1216 1216
1217 1217 def between(self, pairs):
1218 1218 r = []
1219 1219
1220 1220 for top, bottom in pairs:
1221 1221 n, l, i = top, [], 0
1222 1222 f = 1
1223 1223
1224 1224 while n != bottom and n != nullid:
1225 1225 p = self.changelog.parents(n)[0]
1226 1226 if i == f:
1227 1227 l.append(n)
1228 1228 f = f * 2
1229 1229 n = p
1230 1230 i += 1
1231 1231
1232 1232 r.append(l)
1233 1233
1234 1234 return r
1235 1235
1236 1236 def findincoming(self, remote, base=None, heads=None, force=False):
1237 1237 """Return list of roots of the subsets of missing nodes from remote
1238 1238
1239 1239 If base dict is specified, assume that these nodes and their parents
1240 1240 exist on the remote side and that no child of a node of base exists
1241 1241 in both remote and self.
1242 1242 Furthermore base will be updated to include the nodes that exists
1243 1243 in self and remote but no children exists in self and remote.
1244 1244 If a list of heads is specified, return only nodes which are heads
1245 1245 or ancestors of these heads.
1246 1246
1247 1247 All the ancestors of base are in self and in remote.
1248 1248 All the descendants of the list returned are missing in self.
1249 1249 (and so we know that the rest of the nodes are missing in remote, see
1250 1250 outgoing)
1251 1251 """
1252 1252 return self.findcommonincoming(remote, base, heads, force)[1]
1253 1253
    def findcommonincoming(self, remote, base=None, heads=None, force=False):
        """Return a tuple (common, missing roots, heads) used to identify
        missing nodes from remote.

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        """
        m = self.changelog.nodemap
        search = []          # (head, root) ranges to binary-search later
        fetch = set()        # earliest missing changesets found
        seen = set()         # branch heads already examined
        seenbranch = set()   # whole branch tuples already examined
        if base is None:
            base = {}

        if not heads:
            heads = remote.heads()

        # empty local repo: everything the remote has is missing
        if self.changelog.tip() == nullid:
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid], [nullid], list(heads)
            return [nullid], [], []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        heads = unknown
        if not unknown:
            # every remote head is known locally: nothing is missing
            return base.keys(), [], []

        req = set(unknown)   # nodes already requested from the remote
        reqcnt = 0           # number of round trips, reported at the end

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug("examining %s:%s\n"
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug("branch already found\n")
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug("found incomplete branch %s:%s\n"
                                  % (short(n[0]), short(n[1])))
                    search.append(n[0:2]) # schedule branch range for scanning
                    seenbranch.add(n)
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            # both parents known: root is the earliest
                            # unknown changeset of this segment
                            self.ui.debug("found new changeset %s\n" %
                                          short(n[1]))
                            fetch.add(n[1]) # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    # queue any still-unknown parents for the next batch
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req.add(p)
                seen.add(n[0])

            if r:
                reqcnt += 1
                self.ui.debug("request %d: %s\n" %
                            (reqcnt, " ".join(map(short, r))))
                # ask about parents in batches of 10 to bound request size
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p + 10]):
                        self.ui.debug("received %s:%s\n" %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            newsearch = []
            reqcnt += 1
            for n, l in zip(search, remote.between(search)):
                l.append(n[1])
                p = n[0]
                f = 1
                for i in l:
                    self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                    if i in m:
                        if f <= 2:
                            # adjacent samples: boundary found exactly
                            self.ui.debug("found new branch changeset %s\n" %
                                              short(p))
                            fetch.add(p)
                            base[i] = 1
                        else:
                            # narrow to the (p, i) sub-range and iterate
                            self.ui.debug("narrowed branch search to %s:%s\n"
                                          % (short(p), short(i)))
                            newsearch.append((p, i))
                        break
                    p, f = i, f * 2
            search = newsearch

        # sanity check our fetch list
        for f in fetch:
            if f in m:
                # NOTE(review): short(f[:4]) hexifies only the first 4
                # bytes of the node; short(f) looks intended -- confirm
                raise error.RepoError(_("already have changeset ")
                                      + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug("found new changesets starting at " +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug("%d total queries\n" % reqcnt)

        return base.keys(), list(fetch), heads
1395 1395
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base is None:
            # no known common nodes supplied: discover them first
            # (findincoming fills 'base' in place)
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug("common changesets up to "
                      + " ".join(map(short, base.keys())) + "\n")

        # start from every local node ...
        remain = set(self.changelog.nodemap)

        # prune everything remote has from the tree
        remain.remove(nullid)
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                remain.remove(n)
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = set()
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            # a node with no surviving parent is a root of the outgoing set
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads.add(p1)
                if p2 in heads:
                    updated_heads.add(p2)

        # this is the set of all roots we have to push
        if heads:
            return subset, list(updated_heads)
        else:
            return subset
1443 1443
1444 1444 def pull(self, remote, heads=None, force=False):
1445 1445 lock = self.lock()
1446 1446 try:
1447 1447 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1448 1448 force=force)
1449 1449 if fetch == [nullid]:
1450 1450 self.ui.status(_("requesting all changes\n"))
1451 1451
1452 1452 if not fetch:
1453 1453 self.ui.status(_("no changes found\n"))
1454 1454 return 0
1455 1455
1456 1456 if heads is None and remote.capable('changegroupsubset'):
1457 1457 heads = rheads
1458 1458
1459 1459 if heads is None:
1460 1460 cg = remote.changegroup(fetch, 'pull')
1461 1461 else:
1462 1462 if not remote.capable('changegroupsubset'):
1463 1463 raise util.Abort(_("Partial pull cannot be done because "
1464 1464 "other repository doesn't support "
1465 1465 "changegroupsubset."))
1466 1466 cg = remote.changegroupsubset(fetch, heads, 'pull')
1467 1467 return self.addchangegroup(cg, 'pull', remote.url())
1468 1468 finally:
1469 1469 lock.release()
1470 1470
1471 1471 def push(self, remote, force=False, revs=None):
1472 1472 # there are two ways to push to remote repo:
1473 1473 #
1474 1474 # addchangegroup assumes local user can lock remote
1475 1475 # repo (local filesystem, old ssh servers).
1476 1476 #
1477 1477 # unbundle assumes local user cannot lock remote repo (new ssh
1478 1478 # servers, http servers).
1479 1479
1480 1480 if remote.capable('unbundle'):
1481 1481 return self.push_unbundle(remote, force, revs)
1482 1482 return self.push_addchangegroup(remote, force, revs)
1483 1483
    def prepush(self, remote, force, revs):
        '''Analyze the local and remote repositories and determine which
        changesets need to be pushed to the remote. Return a tuple
        (changegroup, remoteheads). changegroup is a readable file-like
        object whose read() returns successive changegroup chunks ready to
        be sent over the wire. remoteheads is the list of remote heads.

        When the push should not proceed, the first element is None and
        the second is the exit status: (None, 1) when there is nothing
        to push, (None, 0) when the push was refused (new remote heads
        or branches without force).
        '''
        common = {}
        remote_heads = remote.heads()
        # findincoming fills 'common' in place; a true result means the
        # remote has changes we do not (used for the warning below)
        inc = self.findincoming(remote, common, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, common, remote_heads)
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)

        def checkbranch(lheads, rheads, updatelb, branchname=None):
            '''
            check whether there are more local heads than remote heads on
            a specific branch.

            lheads: local branch heads
            rheads: remote branch heads
            updatelb: outgoing local branch bases
            branchname: branch name used in the warning message, or None
            '''

            warn = 0

            if not revs and len(lheads) > len(rheads):
                warn = 1
            else:
                # add local heads involved in the push
                updatelheads = [self.changelog.heads(x, lheads)
                                for x in updatelb]
                newheads = set(sum(updatelheads, [])) & set(lheads)

                if not newheads:
                    return True

                # add heads we don't have or that are not involved in the push
                for r in rheads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.add(r)
                    else:
                        newheads.add(r)
                if len(newheads) > len(rheads):
                    warn = 1

            if warn:
                if branchname is not None:
                    msg = _("abort: push creates new remote heads"
                            " on branch '%s'!\n") % branchname
                else:
                    msg = _("abort: push creates new remote heads!\n")
                self.ui.warn(msg)
                if len(lheads) > len(rheads):
                    self.ui.status(_("(did you forget to merge?"
                                     " use push -f to force)\n"))
                else:
                    self.ui.status(_("(you should pull and merge or"
                                     " use push -f to force)\n"))
                return False
            return True

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # Check for each named branch if we're creating new remote heads.
            # To be a remote head after push, node must be either:
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            #
            # New named branches cannot be created without --force.

            if remote_heads != [nullid]:
                if remote.capable('branchmap'):
                    remotebrheads = remote.branchmap()

                    if not revs:
                        localbrheads = self.branchmap()
                    else:
                        # only consider branches of the pushed heads
                        localbrheads = {}
                        for n in heads:
                            branch = self[n].branch()
                            localbrheads.setdefault(branch, []).append(n)

                    newbranches = list(set(localbrheads) - set(remotebrheads))
                    if newbranches: # new branch requires --force
                        branchnames = ', '.join("%s" % b for b in newbranches)
                        self.ui.warn(_("abort: push creates "
                                       "new remote branches: %s!\n")
                                     % branchnames)
                        # propose 'push -b .' in the msg too?
                        self.ui.status(_("(use 'hg push -f' to force)\n"))
                        return None, 0
                    for branch, lheads in localbrheads.iteritems():
                        if branch in remotebrheads:
                            rheads = remotebrheads[branch]
                            if not checkbranch(lheads, rheads, update, branch):
                                return None, 0
                else:
                    # old server without branchmap: single global check
                    if not checkbranch(heads, remote_heads, update):
                        return None, 0

        if inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            # use the fast path, no race possible on push
            nodes = self.changelog.findmissing(common.keys())
            cg = self._changegroup(nodes, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1603 1603
1604 1604 def push_addchangegroup(self, remote, force, revs):
1605 1605 lock = remote.lock()
1606 1606 try:
1607 1607 ret = self.prepush(remote, force, revs)
1608 1608 if ret[0] is not None:
1609 1609 cg, remote_heads = ret
1610 1610 return remote.addchangegroup(cg, 'push', self.url())
1611 1611 return ret[1]
1612 1612 finally:
1613 1613 lock.release()
1614 1614
1615 1615 def push_unbundle(self, remote, force, revs):
1616 1616 # local repo finds heads on server, finds out what revs it
1617 1617 # must push. once revs transferred, if server finds it has
1618 1618 # different heads (someone else won commit/push race), server
1619 1619 # aborts.
1620 1620
1621 1621 ret = self.prepush(remote, force, revs)
1622 1622 if ret[0] is not None:
1623 1623 cg, remote_heads = ret
1624 1624 if force:
1625 1625 remote_heads = ['force']
1626 1626 return remote.unbundle(cg, remote_heads, 'push')
1627 1627 return ret[1]
1628 1628
1629 1629 def changegroupinfo(self, nodes, source):
1630 1630 if self.ui.verbose or source == 'bundle':
1631 1631 self.ui.status(_("%d changesets found\n") % len(nodes))
1632 1632 if self.ui.debugflag:
1633 1633 self.ui.debug("list of changesets:\n")
1634 1634 for node in nodes:
1635 1635 self.ui.debug("%s\n" % hex(node))
1636 1636
1637 1637 def changegroupsubset(self, bases, heads, source, extranodes=None):
1638 1638 """Compute a changegroup consisting of all the nodes that are
1639 1639 descendents of any of the bases and ancestors of any of the heads.
1640 1640 Return a chunkbuffer object whose read() method will return
1641 1641 successive changegroup chunks.
1642 1642
1643 1643 It is fairly complex as determining which filenodes and which
1644 1644 manifest nodes need to be included for the changeset to be complete
1645 1645 is non-trivial.
1646 1646
1647 1647 Another wrinkle is doing the reverse, figuring out which changeset in
1648 1648 the changegroup a particular filenode or manifestnode belongs to.
1649 1649
1650 1650 The caller can specify some nodes that must be included in the
1651 1651 changegroup using the extranodes argument. It should be a dict
1652 1652 where the keys are the filenames (or 1 for the manifest), and the
1653 1653 values are lists of (node, linknode) tuples, where node is a wanted
1654 1654 node and linknode is the changelog node that should be transmitted as
1655 1655 the linkrev.
1656 1656 """
1657 1657
1658 1658 # Set up some initial variables
1659 1659 # Make it easy to refer to self.changelog
1660 1660 cl = self.changelog
1661 1661 # msng is short for missing - compute the list of changesets in this
1662 1662 # changegroup.
1663 1663 if not bases:
1664 1664 bases = [nullid]
1665 1665 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1666 1666
1667 1667 if extranodes is None:
1668 1668 # can we go through the fast path ?
1669 1669 heads.sort()
1670 1670 allheads = self.heads()
1671 1671 allheads.sort()
1672 1672 if heads == allheads:
1673 1673 return self._changegroup(msng_cl_lst, source)
1674 1674
1675 1675 # slow path
1676 1676 self.hook('preoutgoing', throw=True, source=source)
1677 1677
1678 1678 self.changegroupinfo(msng_cl_lst, source)
1679 1679 # Some bases may turn out to be superfluous, and some heads may be
1680 1680 # too. nodesbetween will return the minimal set of bases and heads
1681 1681 # necessary to re-create the changegroup.
1682 1682
1683 1683 # Known heads are the list of heads that it is assumed the recipient
1684 1684 # of this changegroup will know about.
1685 1685 knownheads = set()
1686 1686 # We assume that all parents of bases are known heads.
1687 1687 for n in bases:
1688 1688 knownheads.update(cl.parents(n))
1689 1689 knownheads.discard(nullid)
1690 1690 knownheads = list(knownheads)
1691 1691 if knownheads:
1692 1692 # Now that we know what heads are known, we can compute which
1693 1693 # changesets are known. The recipient must know about all
1694 1694 # changesets required to reach the known heads from the null
1695 1695 # changeset.
1696 1696 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1697 1697 junk = None
1698 1698 # Transform the list into a set.
1699 1699 has_cl_set = set(has_cl_set)
1700 1700 else:
1701 1701 # If there were no known heads, the recipient cannot be assumed to
1702 1702 # know about any changesets.
1703 1703 has_cl_set = set()
1704 1704
1705 1705 # Make it easy to refer to self.manifest
1706 1706 mnfst = self.manifest
1707 1707 # We don't know which manifests are missing yet
1708 1708 msng_mnfst_set = {}
1709 1709 # Nor do we know which filenodes are missing.
1710 1710 msng_filenode_set = {}
1711 1711
1712 1712 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1713 1713 junk = None
1714 1714
1715 1715 # A changeset always belongs to itself, so the changenode lookup
1716 1716 # function for a changenode is identity.
1717 1717 def identity(x):
1718 1718 return x
1719 1719
1720 1720 # If we determine that a particular file or manifest node must be a
1721 1721 # node that the recipient of the changegroup will already have, we can
1722 1722 # also assume the recipient will have all the parents. This function
1723 1723 # prunes them from the set of missing nodes.
1724 1724 def prune_parents(revlog, hasset, msngset):
1725 1725 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1726 1726 msngset.pop(revlog.node(r), None)
1727 1727
1728 1728 # Use the information collected in collect_manifests_and_files to say
1729 1729 # which changenode any manifestnode belongs to.
1730 1730 def lookup_manifest_link(mnfstnode):
1731 1731 return msng_mnfst_set[mnfstnode]
1732 1732
1733 1733 # A function generating function that sets up the initial environment
1734 1734 # the inner function.
1735 1735 def filenode_collector(changedfiles):
1736 1736 # This gathers information from each manifestnode included in the
1737 1737 # changegroup about which filenodes the manifest node references
1738 1738 # so we can include those in the changegroup too.
1739 1739 #
1740 1740 # It also remembers which changenode each filenode belongs to. It
1741 1741 # does this by assuming the a filenode belongs to the changenode
1742 1742 # the first manifest that references it belongs to.
1743 1743 def collect_msng_filenodes(mnfstnode):
1744 1744 r = mnfst.rev(mnfstnode)
1745 1745 if r - 1 in mnfst.parentrevs(r):
1746 1746 # If the previous rev is one of the parents,
1747 1747 # we only need to see a diff.
1748 1748 deltamf = mnfst.readdelta(mnfstnode)
1749 1749 # For each line in the delta
1750 1750 for f, fnode in deltamf.iteritems():
1751 1751 f = changedfiles.get(f, None)
1752 1752 # And if the file is in the list of files we care
1753 1753 # about.
1754 1754 if f is not None:
1755 1755 # Get the changenode this manifest belongs to
1756 1756 clnode = msng_mnfst_set[mnfstnode]
1757 1757 # Create the set of filenodes for the file if
1758 1758 # there isn't one already.
1759 1759 ndset = msng_filenode_set.setdefault(f, {})
1760 1760 # And set the filenode's changelog node to the
1761 1761 # manifest's if it hasn't been set already.
1762 1762 ndset.setdefault(fnode, clnode)
1763 1763 else:
1764 1764 # Otherwise we need a full manifest.
1765 1765 m = mnfst.read(mnfstnode)
1766 1766 # For every file in we care about.
1767 1767 for f in changedfiles:
1768 1768 fnode = m.get(f, None)
1769 1769 # If it's in the manifest
1770 1770 if fnode is not None:
1771 1771 # See comments above.
1772 1772 clnode = msng_mnfst_set[mnfstnode]
1773 1773 ndset = msng_filenode_set.setdefault(f, {})
1774 1774 ndset.setdefault(fnode, clnode)
1775 1775 return collect_msng_filenodes
1776 1776
1777 1777 # We have a list of filenodes we think we need for a file, lets remove
1778 1778 # all those we know the recipient must have.
1779 1779 def prune_filenodes(f, filerevlog):
1780 1780 msngset = msng_filenode_set[f]
1781 1781 hasset = set()
1782 1782 # If a 'missing' filenode thinks it belongs to a changenode we
1783 1783 # assume the recipient must have, then the recipient must have
1784 1784 # that filenode.
1785 1785 for n in msngset:
1786 1786 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1787 1787 if clnode in has_cl_set:
1788 1788 hasset.add(n)
1789 1789 prune_parents(filerevlog, hasset, msngset)
1790 1790
1791 1791 # A function generator function that sets up the a context for the
1792 1792 # inner function.
1793 1793 def lookup_filenode_link_func(fname):
1794 1794 msngset = msng_filenode_set[fname]
1795 1795 # Lookup the changenode the filenode belongs to.
1796 1796 def lookup_filenode_link(fnode):
1797 1797 return msngset[fnode]
1798 1798 return lookup_filenode_link
1799 1799
1800 1800 # Add the nodes that were explicitly requested.
1801 1801 def add_extra_nodes(name, nodes):
1802 1802 if not extranodes or name not in extranodes:
1803 1803 return
1804 1804
1805 1805 for node, linknode in extranodes[name]:
1806 1806 if node not in nodes:
1807 1807 nodes[node] = linknode
1808 1808
1809 1809 # Now that we have all theses utility functions to help out and
1810 1810 # logically divide up the task, generate the group.
        def gengroup():
            """Yield the raw changegroup chunks: the changelog group, the
            manifest group, then one group per changed file, terminated by
            a close chunk.  Indeterminate progress (the total chunk count
            is unknown up front) is reported via ui.progress."""
            # The set of changed files starts empty.
            changedfiles = {}
            collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)

            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity, collect)
            cnt = 0
            for chnk in group:
                yield chnk
                self.ui.progress('bundle changes', cnt, unit='chunks')
                cnt += 1
            self.ui.progress('bundle changes', None, unit='chunks')


            # Figure out which manifest nodes (of the ones we think might be
            # part of the changegroup) the recipient must know about and
            # remove them from the changegroup.
            has_mnfst_set = set()
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
                if linknode in has_cl_set:
                    has_mnfst_set.add(n)
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
            add_extra_nodes(1, msng_mnfst_set)
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(key=mnfst.rev)
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            cnt = 0
            for chnk in group:
                yield chnk
                self.ui.progress('bundle manifests', cnt, unit='chunks')
                cnt += 1
            self.ui.progress('bundle manifests', None, unit='chunks')

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            if extranodes:
                for fname in extranodes:
                    if isinstance(fname, int):
                        continue
                    msng_filenode_set.setdefault(fname, {})
                    changedfiles[fname] = 1
            # Go through all our files in order sorted by name.  The file
            # chunk counter spans all files; each progress call also names
            # the file currently being bundled.
            cnt = 0
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if fname in msng_filenode_set:
                    prune_filenodes(fname, filerevlog)
                    add_extra_nodes(fname, msng_filenode_set[fname])
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(key=filerevlog.rev)
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        self.ui.progress(
                            'bundle files', cnt, item=fname, unit='chunks')
                        cnt += 1
                        yield chnk
                if fname in msng_filenode_set:
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()
            self.ui.progress('bundle files', None, unit='chunks')
1888 1902
1889 1903 if msng_cl_lst:
1890 1904 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1891 1905
1892 1906 return util.chunkbuffer(gengroup())
1893 1907
    def changegroup(self, basenodes, source):
        """Return a changegroup of all nodes that we have that a recipient
        with roots `basenodes` doesn't have.

        Delegates to changegroupsubset() against the current heads so the
        head computation happens under one consistent view (issue1320).
        """
        # to avoid a race we use changegroupsubset() (issue1320)
        return self.changegroupsubset(basenodes, self.heads(), source)
1897 1911
    def _changegroup(self, nodes, source):
        """Compute the changegroup of all nodes that we have that a recipient
        doesn't. Return a chunkbuffer object whose read() method will return
        successive changegroup chunks.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        nodes is the set of nodes to send"""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        revset = set([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes, source)

        def identity(x):
            # Changelog nodes serve as their own link nodes.
            return x

        def gennodelst(log):
            # Yield the nodes of `log` whose linked changeset is outgoing.
            for r in log:
                if log.linkrev(r) in revset:
                    yield log.node(r)

        def lookuprevlink_func(revlog):
            # Return a function mapping a node of `revlog` to the changelog
            # node of the changeset that introduced it.
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(revlog.rev(n)))
            return lookuprevlink

        def gengroup():
            '''yield a sequence of changegroup chunks (strings)'''
            # construct a list of all changed files
            changedfiles = {}
            mmfs = {}
            collect = changegroup.collector(cl, mmfs, changedfiles)

            # Chunk totals are unknown in advance, so report progress as
            # an indeterminate running count of chunks.
            cnt = 0
            for chnk in cl.group(nodes, identity, collect):
                self.ui.progress('bundle changes', cnt, unit='chunks')
                cnt += 1
                yield chnk
            self.ui.progress('bundle changes', None, unit='chunks')

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            cnt = 0
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                self.ui.progress('bundle manifests', cnt, unit='chunks')
                cnt += 1
                yield chnk
            self.ui.progress('bundle manifests', None, unit='chunks')

            cnt = 0
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        self.ui.progress(
                            'bundle files', cnt, item=fname, unit='chunks')
                        cnt += 1
                        yield chnk
            self.ui.progress('bundle files', None, unit='chunks')

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1961 1988
    def addchangegroup(self, source, srctype, url, emptyok=False):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            # Next changelog revision number a chunk will receive.
            self.ui.debug("add changeset %s\n" % short(x))
            return len(cl)

        def revmap(x):
            # Map a changelog node to its revision number.
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        tr = self.transaction()
        try:
            trp = weakref.proxy(tr)
            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            clstart = len(cl)
            # Small stateful callable used as a per-chunk progress callback;
            # `step` and `count` are mutated as we move from changesets to
            # manifests to files below.
            class prog(object):
                step = 'changesets'
                count = 1
                ui = self.ui
                def __call__(self):
                    self.ui.progress(self.step, self.count, unit='chunks')
                    self.count += 1
            pr = prog()
            chunkiter = changegroup.chunkiter(source, progress=pr)
            if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
                raise util.Abort(_("received changelog group is empty"))
            clend = len(cl)
            changesets = clend - clstart
            self.ui.progress('changesets', None)

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            pr.step = 'manifests'
            pr.count = 1
            chunkiter = changegroup.chunkiter(source, progress=pr)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, trp)
            self.ui.progress('manifests', None)

            needfiles = {}
            if self.ui.configbool('server', 'validate', default=False):
                # validate incoming csets have their manifests
                for cset in xrange(clstart, clend):
                    mfest = self.changelog.read(self.changelog.node(cset))[0]
                    mfest = self.manifest.readdelta(mfest)
                    # store file nodes we must see
                    for f, n in mfest.iteritems():
                        needfiles.setdefault(f, set()).add(n)

            # process the files
            self.ui.status(_("adding file changes\n"))
            pr.step = 'files'
            pr.count = 1
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug("adding %s revisions\n" % f)
                fl = self.file(f)
                o = len(fl)
                chunkiter = changegroup.chunkiter(source, progress=pr)
                if fl.addgroup(chunkiter, revmap, trp) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += len(fl) - o
                files += 1
                if f in needfiles:
                    # Tick off the filenodes this group delivered.
                    needs = needfiles[f]
                    for new in xrange(o, len(fl)):
                        n = fl.node(new)
                        if n in needs:
                            needs.remove(n)
                    if not needs:
                        del needfiles[f]
            self.ui.progress('files', None)

            # Anything still listed in needfiles was promised by a manifest
            # but never delivered: refuse the changegroup.
            for f, needs in needfiles.iteritems():
                fl = self.file(f)
                for n in needs:
                    try:
                        fl.rev(n)
                    except error.LookupError:
                        raise util.Abort(
                            _('missing file data for %s:%s - run hg verify') %
                            (f, hex(n)))

            newheads = len(cl.heads())
            heads = ""
            if oldheads and newheads != oldheads:
                heads = _(" (%+d heads)") % (newheads - oldheads)

            self.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                            % (changesets, revisions, files, heads))

            if changesets > 0:
                # writepending makes the hook see the pending changelog.
                p = lambda: cl.writepending() and self.root or ""
                self.hook('pretxnchangegroup', throw=True,
                          node=hex(cl.node(clstart)), source=srctype,
                          url=url, pending=p)

            # make changelog see real files again
            cl.finalize(trp)

            tr.close()
        finally:
            del tr

        if changesets > 0:
            # forcefully update the on-disk branch cache
            self.ui.debug("updating the branch cache\n")
            self.branchtags()
            self.hook("changegroup", node=hex(cl.node(clstart)),
                      source=srctype, url=url)

            for i in xrange(clstart, clend):
                self.hook("incoming", node=hex(cl.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
2108 2135
2109 2136
    def stream_in(self, remote):
        """Populate this repo by copying raw store files from `remote`
        using the streaming-clone protocol.

        Reads the negotiated response code, then the announced file
        count and total byte size, and writes each streamed file
        directly into the local store.  Returns len(self.heads()) + 1.
        """
        fp = remote.stream_out()
        l = fp.readline()
        try:
            resp = int(l)
        except ValueError:
            raise error.ResponseError(
                _('Unexpected response from remote server:'), l)
        if resp == 1:
            raise util.Abort(_('operation forbidden by server'))
        elif resp == 2:
            raise util.Abort(_('locking the remote repository failed'))
        elif resp != 0:
            raise util.Abort(_('the server sent an unknown error code'))
        self.ui.status(_('streaming all changes\n'))
        l = fp.readline()
        try:
            total_files, total_bytes = map(int, l.split(' ', 1))
        except (ValueError, TypeError):
            raise error.ResponseError(
                _('Unexpected response from remote server:'), l)
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        start = time.time()
        for i in xrange(total_files):
            # XXX doesn't support '\n' or '\r' in filenames
            l = fp.readline()
            try:
                name, size = l.split('\0', 1)
                size = int(size)
            except (ValueError, TypeError):
                raise error.ResponseError(
                    _('Unexpected response from remote server:'), l)
            self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
            # for backwards compat, name was partially encoded
            ofp = self.sopener(store.decodedir(name), 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        if elapsed <= 0:
            # Guard against zero/negative clock deltas in the rate below.
            elapsed = 0.001
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))
        self.invalidate()
        return len(self.heads()) + 1
2157 2184
2158 2185 def clone(self, remote, heads=[], stream=False):
2159 2186 '''clone remote repository.
2160 2187
2161 2188 keyword arguments:
2162 2189 heads: list of revs to clone (forces use of pull)
2163 2190 stream: use streaming clone if possible'''
2164 2191
2165 2192 # now, all clients that can request uncompressed clones can
2166 2193 # read repo formats supported by all servers that can serve
2167 2194 # them.
2168 2195
2169 2196 # if revlog format changes, client will have to check version
2170 2197 # and format flags on "stream" capability, and use
2171 2198 # uncompressed only if compatible.
2172 2199
2173 2200 if stream and not heads and remote.capable('stream'):
2174 2201 return self.stream_in(remote)
2175 2202 return self.pull(remote, heads)
2176 2203
2177 2204 # used to avoid circular references so destructors work
2178 2205 def aftertrans(files):
def aftertrans(files):
    """Return a callback that renames each (src, dest) pair in `files`.

    The pairs are copied into plain tuples up front so the returned
    closure holds no reference back to the caller's structures (used to
    avoid circular references so destructors work).
    """
    pending = [tuple(pair) for pair in files]
    def a():
        for source, destination in pending:
            util.rename(source, destination)
    return a
2184 2211
def instance(ui, path, create):
    """Open (or create) a localrepository, stripping a file:// scheme."""
    localpath = util.drop_scheme('file', path)
    return localrepository(ui, localpath, create)
2187 2214
def islocal(path):
    """Report whether `path` denotes a local repository.

    This module only ever handles local repositories, so the answer is
    always True; `path` is ignored.
    """
    return True
@@ -1,717 +1,1165
1 1 3:911600dab2ae
2 2 requesting all changes
3 3 adding changesets
4 4 adding manifests
5 5 adding file changes
6 6 added 1 changesets with 3 changes to 3 files
7 7 updating to branch default
8 8 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
9 9
10 10 Extension disabled for lack of a hook
11 11 Pushing as user fred
12 12 hgrc = """
13 13 """
14 14 pushing to ../b
15 15 searching for changes
16 16 common changesets up to 6675d58eff77
17 17 3 changesets found
18 18 list of changesets:
19 19 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
20 20 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
21 21 911600dab2ae7a9baff75958b84fe606851ce955
22 22 adding changesets
23 bundle changes: 0 chunks
24 bundle changes: 1 chunks
25 bundle changes: 2 chunks
26 bundle changes: 3 chunks
27 bundle changes: 4 chunks
28 bundle changes: 5 chunks
29 bundle changes: 6 chunks
30 bundle changes: 7 chunks
31 bundle changes: 8 chunks
32 bundle changes: 9 chunks
33 bundle manifests: 0 chunks
34 bundle manifests: 1 chunks
35 bundle manifests: 2 chunks
36 bundle manifests: 3 chunks
37 bundle manifests: 4 chunks
38 bundle manifests: 5 chunks
39 bundle manifests: 6 chunks
40 bundle manifests: 7 chunks
41 bundle manifests: 8 chunks
42 bundle manifests: 9 chunks
43 bundle files: foo/Bar/file.txt 0 chunks
44 bundle files: foo/Bar/file.txt 1 chunks
45 bundle files: foo/Bar/file.txt 2 chunks
46 bundle files: foo/Bar/file.txt 3 chunks
47 bundle files: foo/file.txt 4 chunks
48 bundle files: foo/file.txt 5 chunks
49 bundle files: foo/file.txt 6 chunks
50 bundle files: foo/file.txt 7 chunks
51 bundle files: quux/file.py 8 chunks
52 bundle files: quux/file.py 9 chunks
53 bundle files: quux/file.py 10 chunks
54 bundle files: quux/file.py 11 chunks
23 55 changesets: 1 chunks
24 56 add changeset ef1ea85a6374
25 57 changesets: 2 chunks
26 58 add changeset f9cafe1212c8
27 59 changesets: 3 chunks
28 60 add changeset 911600dab2ae
29 61 adding manifests
30 62 manifests: 1 chunks
31 63 manifests: 2 chunks
32 64 manifests: 3 chunks
33 65 adding file changes
34 66 adding foo/Bar/file.txt revisions
35 67 files: 1 chunks
36 68 adding foo/file.txt revisions
37 69 files: 2 chunks
38 70 adding quux/file.py revisions
39 71 files: 3 chunks
40 72 added 3 changesets with 3 changes to 3 files
41 73 updating the branch cache
42 74 rolling back last transaction
43 75 0:6675d58eff77
44 76
45 77 Extension disabled for lack of acl.sources
46 78 Pushing as user fred
47 79 hgrc = """
48 80 [hooks]
49 81 pretxnchangegroup.acl = python:hgext.acl.hook
50 82 """
51 83 pushing to ../b
52 84 searching for changes
53 85 common changesets up to 6675d58eff77
54 86 invalidating branch cache (tip differs)
55 87 3 changesets found
56 88 list of changesets:
57 89 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
58 90 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
59 91 911600dab2ae7a9baff75958b84fe606851ce955
60 92 adding changesets
93 bundle changes: 0 chunks
94 bundle changes: 1 chunks
95 bundle changes: 2 chunks
96 bundle changes: 3 chunks
97 bundle changes: 4 chunks
98 bundle changes: 5 chunks
99 bundle changes: 6 chunks
100 bundle changes: 7 chunks
101 bundle changes: 8 chunks
102 bundle changes: 9 chunks
103 bundle manifests: 0 chunks
104 bundle manifests: 1 chunks
105 bundle manifests: 2 chunks
106 bundle manifests: 3 chunks
107 bundle manifests: 4 chunks
108 bundle manifests: 5 chunks
109 bundle manifests: 6 chunks
110 bundle manifests: 7 chunks
111 bundle manifests: 8 chunks
112 bundle manifests: 9 chunks
113 bundle files: foo/Bar/file.txt 0 chunks
114 bundle files: foo/Bar/file.txt 1 chunks
115 bundle files: foo/Bar/file.txt 2 chunks
116 bundle files: foo/Bar/file.txt 3 chunks
117 bundle files: foo/file.txt 4 chunks
118 bundle files: foo/file.txt 5 chunks
119 bundle files: foo/file.txt 6 chunks
120 bundle files: foo/file.txt 7 chunks
121 bundle files: quux/file.py 8 chunks
122 bundle files: quux/file.py 9 chunks
123 bundle files: quux/file.py 10 chunks
124 bundle files: quux/file.py 11 chunks
61 125 changesets: 1 chunks
62 126 add changeset ef1ea85a6374
63 127 changesets: 2 chunks
64 128 add changeset f9cafe1212c8
65 129 changesets: 3 chunks
66 130 add changeset 911600dab2ae
67 131 adding manifests
68 132 manifests: 1 chunks
69 133 manifests: 2 chunks
70 134 manifests: 3 chunks
71 135 adding file changes
72 136 adding foo/Bar/file.txt revisions
73 137 files: 1 chunks
74 138 adding foo/file.txt revisions
75 139 files: 2 chunks
76 140 adding quux/file.py revisions
77 141 files: 3 chunks
78 142 added 3 changesets with 3 changes to 3 files
79 143 calling hook pretxnchangegroup.acl: hgext.acl.hook
80 144 acl: changes have source "push" - skipping
81 145 updating the branch cache
82 146 rolling back last transaction
83 147 0:6675d58eff77
84 148
85 149 No [acl.allow]/[acl.deny]
86 150 Pushing as user fred
87 151 hgrc = """
88 152 [hooks]
89 153 pretxnchangegroup.acl = python:hgext.acl.hook
90 154 [acl]
91 155 sources = push
92 156 """
93 157 pushing to ../b
94 158 searching for changes
95 159 common changesets up to 6675d58eff77
96 160 invalidating branch cache (tip differs)
97 161 3 changesets found
98 162 list of changesets:
99 163 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
100 164 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
101 165 911600dab2ae7a9baff75958b84fe606851ce955
102 166 adding changesets
167 bundle changes: 0 chunks
168 bundle changes: 1 chunks
169 bundle changes: 2 chunks
170 bundle changes: 3 chunks
171 bundle changes: 4 chunks
172 bundle changes: 5 chunks
173 bundle changes: 6 chunks
174 bundle changes: 7 chunks
175 bundle changes: 8 chunks
176 bundle changes: 9 chunks
177 bundle manifests: 0 chunks
178 bundle manifests: 1 chunks
179 bundle manifests: 2 chunks
180 bundle manifests: 3 chunks
181 bundle manifests: 4 chunks
182 bundle manifests: 5 chunks
183 bundle manifests: 6 chunks
184 bundle manifests: 7 chunks
185 bundle manifests: 8 chunks
186 bundle manifests: 9 chunks
187 bundle files: foo/Bar/file.txt 0 chunks
188 bundle files: foo/Bar/file.txt 1 chunks
189 bundle files: foo/Bar/file.txt 2 chunks
190 bundle files: foo/Bar/file.txt 3 chunks
191 bundle files: foo/file.txt 4 chunks
192 bundle files: foo/file.txt 5 chunks
193 bundle files: foo/file.txt 6 chunks
194 bundle files: foo/file.txt 7 chunks
195 bundle files: quux/file.py 8 chunks
196 bundle files: quux/file.py 9 chunks
197 bundle files: quux/file.py 10 chunks
198 bundle files: quux/file.py 11 chunks
103 199 changesets: 1 chunks
104 200 add changeset ef1ea85a6374
105 201 changesets: 2 chunks
106 202 add changeset f9cafe1212c8
107 203 changesets: 3 chunks
108 204 add changeset 911600dab2ae
109 205 adding manifests
110 206 manifests: 1 chunks
111 207 manifests: 2 chunks
112 208 manifests: 3 chunks
113 209 adding file changes
114 210 adding foo/Bar/file.txt revisions
115 211 files: 1 chunks
116 212 adding foo/file.txt revisions
117 213 files: 2 chunks
118 214 adding quux/file.py revisions
119 215 files: 3 chunks
120 216 added 3 changesets with 3 changes to 3 files
121 217 calling hook pretxnchangegroup.acl: hgext.acl.hook
122 218 acl: acl.allow not enabled
123 219 acl: acl.deny not enabled
124 220 acl: allowing changeset ef1ea85a6374
125 221 acl: allowing changeset f9cafe1212c8
126 222 acl: allowing changeset 911600dab2ae
127 223 updating the branch cache
128 224 rolling back last transaction
129 225 0:6675d58eff77
130 226
131 227 Empty [acl.allow]
132 228 Pushing as user fred
133 229 hgrc = """
134 230 [hooks]
135 231 pretxnchangegroup.acl = python:hgext.acl.hook
136 232 [acl]
137 233 sources = push
138 234 [acl.allow]
139 235 """
140 236 pushing to ../b
141 237 searching for changes
142 238 common changesets up to 6675d58eff77
143 239 invalidating branch cache (tip differs)
144 240 3 changesets found
145 241 list of changesets:
146 242 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
147 243 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
148 244 911600dab2ae7a9baff75958b84fe606851ce955
149 245 adding changesets
246 bundle changes: 0 chunks
247 bundle changes: 1 chunks
248 bundle changes: 2 chunks
249 bundle changes: 3 chunks
250 bundle changes: 4 chunks
251 bundle changes: 5 chunks
252 bundle changes: 6 chunks
253 bundle changes: 7 chunks
254 bundle changes: 8 chunks
255 bundle changes: 9 chunks
256 bundle manifests: 0 chunks
257 bundle manifests: 1 chunks
258 bundle manifests: 2 chunks
259 bundle manifests: 3 chunks
260 bundle manifests: 4 chunks
261 bundle manifests: 5 chunks
262 bundle manifests: 6 chunks
263 bundle manifests: 7 chunks
264 bundle manifests: 8 chunks
265 bundle manifests: 9 chunks
266 bundle files: foo/Bar/file.txt 0 chunks
267 bundle files: foo/Bar/file.txt 1 chunks
268 bundle files: foo/Bar/file.txt 2 chunks
269 bundle files: foo/Bar/file.txt 3 chunks
270 bundle files: foo/file.txt 4 chunks
271 bundle files: foo/file.txt 5 chunks
272 bundle files: foo/file.txt 6 chunks
273 bundle files: foo/file.txt 7 chunks
274 bundle files: quux/file.py 8 chunks
275 bundle files: quux/file.py 9 chunks
276 bundle files: quux/file.py 10 chunks
277 bundle files: quux/file.py 11 chunks
150 278 changesets: 1 chunks
151 279 add changeset ef1ea85a6374
152 280 changesets: 2 chunks
153 281 add changeset f9cafe1212c8
154 282 changesets: 3 chunks
155 283 add changeset 911600dab2ae
156 284 adding manifests
157 285 manifests: 1 chunks
158 286 manifests: 2 chunks
159 287 manifests: 3 chunks
160 288 adding file changes
161 289 adding foo/Bar/file.txt revisions
162 290 files: 1 chunks
163 291 adding foo/file.txt revisions
164 292 files: 2 chunks
165 293 adding quux/file.py revisions
166 294 files: 3 chunks
167 295 added 3 changesets with 3 changes to 3 files
168 296 calling hook pretxnchangegroup.acl: hgext.acl.hook
169 297 acl: acl.allow enabled, 0 entries for user fred
170 298 acl: acl.deny not enabled
171 299 acl: user fred not allowed on foo/file.txt
172 300 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
173 301 transaction abort!
174 302 rollback completed
175 303 abort: acl: access denied for changeset ef1ea85a6374
176 304 no rollback information available
177 305 0:6675d58eff77
178 306
179 307 fred is allowed inside foo/
180 308 Pushing as user fred
181 309 hgrc = """
182 310 [hooks]
183 311 pretxnchangegroup.acl = python:hgext.acl.hook
184 312 [acl]
185 313 sources = push
186 314 [acl.allow]
187 315 foo/** = fred
188 316 """
189 317 pushing to ../b
190 318 searching for changes
191 319 common changesets up to 6675d58eff77
192 320 3 changesets found
193 321 list of changesets:
194 322 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
195 323 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
196 324 911600dab2ae7a9baff75958b84fe606851ce955
197 325 adding changesets
326 bundle changes: 0 chunks
327 bundle changes: 1 chunks
328 bundle changes: 2 chunks
329 bundle changes: 3 chunks
330 bundle changes: 4 chunks
331 bundle changes: 5 chunks
332 bundle changes: 6 chunks
333 bundle changes: 7 chunks
334 bundle changes: 8 chunks
335 bundle changes: 9 chunks
336 bundle manifests: 0 chunks
337 bundle manifests: 1 chunks
338 bundle manifests: 2 chunks
339 bundle manifests: 3 chunks
340 bundle manifests: 4 chunks
341 bundle manifests: 5 chunks
342 bundle manifests: 6 chunks
343 bundle manifests: 7 chunks
344 bundle manifests: 8 chunks
345 bundle manifests: 9 chunks
346 bundle files: foo/Bar/file.txt 0 chunks
347 bundle files: foo/Bar/file.txt 1 chunks
348 bundle files: foo/Bar/file.txt 2 chunks
349 bundle files: foo/Bar/file.txt 3 chunks
350 bundle files: foo/file.txt 4 chunks
351 bundle files: foo/file.txt 5 chunks
352 bundle files: foo/file.txt 6 chunks
353 bundle files: foo/file.txt 7 chunks
354 bundle files: quux/file.py 8 chunks
355 bundle files: quux/file.py 9 chunks
356 bundle files: quux/file.py 10 chunks
357 bundle files: quux/file.py 11 chunks
198 358 changesets: 1 chunks
199 359 add changeset ef1ea85a6374
200 360 changesets: 2 chunks
201 361 add changeset f9cafe1212c8
202 362 changesets: 3 chunks
203 363 add changeset 911600dab2ae
204 364 adding manifests
205 365 manifests: 1 chunks
206 366 manifests: 2 chunks
207 367 manifests: 3 chunks
208 368 adding file changes
209 369 adding foo/Bar/file.txt revisions
210 370 files: 1 chunks
211 371 adding foo/file.txt revisions
212 372 files: 2 chunks
213 373 adding quux/file.py revisions
214 374 files: 3 chunks
215 375 added 3 changesets with 3 changes to 3 files
216 376 calling hook pretxnchangegroup.acl: hgext.acl.hook
217 377 acl: acl.allow enabled, 1 entries for user fred
218 378 acl: acl.deny not enabled
219 379 acl: allowing changeset ef1ea85a6374
220 380 acl: allowing changeset f9cafe1212c8
221 381 acl: user fred not allowed on quux/file.py
222 382 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
223 383 transaction abort!
224 384 rollback completed
225 385 abort: acl: access denied for changeset 911600dab2ae
226 386 no rollback information available
227 387 0:6675d58eff77
228 388
229 389 Empty [acl.deny]
230 390 Pushing as user barney
231 391 hgrc = """
232 392 [hooks]
233 393 pretxnchangegroup.acl = python:hgext.acl.hook
234 394 [acl]
235 395 sources = push
236 396 [acl.allow]
237 397 foo/** = fred
238 398 [acl.deny]
239 399 """
240 400 pushing to ../b
241 401 searching for changes
242 402 common changesets up to 6675d58eff77
243 403 3 changesets found
244 404 list of changesets:
245 405 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
246 406 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
247 407 911600dab2ae7a9baff75958b84fe606851ce955
248 408 adding changesets
409 bundle changes: 0 chunks
410 bundle changes: 1 chunks
411 bundle changes: 2 chunks
412 bundle changes: 3 chunks
413 bundle changes: 4 chunks
414 bundle changes: 5 chunks
415 bundle changes: 6 chunks
416 bundle changes: 7 chunks
417 bundle changes: 8 chunks
418 bundle changes: 9 chunks
419 bundle manifests: 0 chunks
420 bundle manifests: 1 chunks
421 bundle manifests: 2 chunks
422 bundle manifests: 3 chunks
423 bundle manifests: 4 chunks
424 bundle manifests: 5 chunks
425 bundle manifests: 6 chunks
426 bundle manifests: 7 chunks
427 bundle manifests: 8 chunks
428 bundle manifests: 9 chunks
429 bundle files: foo/Bar/file.txt 0 chunks
430 bundle files: foo/Bar/file.txt 1 chunks
431 bundle files: foo/Bar/file.txt 2 chunks
432 bundle files: foo/Bar/file.txt 3 chunks
433 bundle files: foo/file.txt 4 chunks
434 bundle files: foo/file.txt 5 chunks
435 bundle files: foo/file.txt 6 chunks
436 bundle files: foo/file.txt 7 chunks
437 bundle files: quux/file.py 8 chunks
438 bundle files: quux/file.py 9 chunks
439 bundle files: quux/file.py 10 chunks
440 bundle files: quux/file.py 11 chunks
249 441 changesets: 1 chunks
250 442 add changeset ef1ea85a6374
251 443 changesets: 2 chunks
252 444 add changeset f9cafe1212c8
253 445 changesets: 3 chunks
254 446 add changeset 911600dab2ae
255 447 adding manifests
256 448 manifests: 1 chunks
257 449 manifests: 2 chunks
258 450 manifests: 3 chunks
259 451 adding file changes
260 452 adding foo/Bar/file.txt revisions
261 453 files: 1 chunks
262 454 adding foo/file.txt revisions
263 455 files: 2 chunks
264 456 adding quux/file.py revisions
265 457 files: 3 chunks
266 458 added 3 changesets with 3 changes to 3 files
267 459 calling hook pretxnchangegroup.acl: hgext.acl.hook
268 460 acl: acl.allow enabled, 0 entries for user barney
269 461 acl: acl.deny enabled, 0 entries for user barney
270 462 acl: user barney not allowed on foo/file.txt
271 463 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
272 464 transaction abort!
273 465 rollback completed
274 466 abort: acl: access denied for changeset ef1ea85a6374
275 467 no rollback information available
276 468 0:6675d58eff77
277 469
278 470 fred is allowed inside foo/, but not foo/bar/ (case matters)
279 471 Pushing as user fred
280 472 hgrc = """
281 473 [hooks]
282 474 pretxnchangegroup.acl = python:hgext.acl.hook
283 475 [acl]
284 476 sources = push
285 477 [acl.allow]
286 478 foo/** = fred
287 479 [acl.deny]
288 480 foo/bar/** = fred
289 481 """
290 482 pushing to ../b
291 483 searching for changes
292 484 common changesets up to 6675d58eff77
293 485 3 changesets found
294 486 list of changesets:
295 487 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
296 488 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
297 489 911600dab2ae7a9baff75958b84fe606851ce955
298 490 adding changesets
491 bundle changes: 0 chunks
492 bundle changes: 1 chunks
493 bundle changes: 2 chunks
494 bundle changes: 3 chunks
495 bundle changes: 4 chunks
496 bundle changes: 5 chunks
497 bundle changes: 6 chunks
498 bundle changes: 7 chunks
499 bundle changes: 8 chunks
500 bundle changes: 9 chunks
501 bundle manifests: 0 chunks
502 bundle manifests: 1 chunks
503 bundle manifests: 2 chunks
504 bundle manifests: 3 chunks
505 bundle manifests: 4 chunks
506 bundle manifests: 5 chunks
507 bundle manifests: 6 chunks
508 bundle manifests: 7 chunks
509 bundle manifests: 8 chunks
510 bundle manifests: 9 chunks
511 bundle files: foo/Bar/file.txt 0 chunks
512 bundle files: foo/Bar/file.txt 1 chunks
513 bundle files: foo/Bar/file.txt 2 chunks
514 bundle files: foo/Bar/file.txt 3 chunks
515 bundle files: foo/file.txt 4 chunks
516 bundle files: foo/file.txt 5 chunks
517 bundle files: foo/file.txt 6 chunks
518 bundle files: foo/file.txt 7 chunks
519 bundle files: quux/file.py 8 chunks
520 bundle files: quux/file.py 9 chunks
521 bundle files: quux/file.py 10 chunks
522 bundle files: quux/file.py 11 chunks
299 523 changesets: 1 chunks
300 524 add changeset ef1ea85a6374
301 525 changesets: 2 chunks
302 526 add changeset f9cafe1212c8
303 527 changesets: 3 chunks
304 528 add changeset 911600dab2ae
305 529 adding manifests
306 530 manifests: 1 chunks
307 531 manifests: 2 chunks
308 532 manifests: 3 chunks
309 533 adding file changes
310 534 adding foo/Bar/file.txt revisions
311 535 files: 1 chunks
312 536 adding foo/file.txt revisions
313 537 files: 2 chunks
314 538 adding quux/file.py revisions
315 539 files: 3 chunks
316 540 added 3 changesets with 3 changes to 3 files
317 541 calling hook pretxnchangegroup.acl: hgext.acl.hook
318 542 acl: acl.allow enabled, 1 entries for user fred
319 543 acl: acl.deny enabled, 1 entries for user fred
320 544 acl: allowing changeset ef1ea85a6374
321 545 acl: allowing changeset f9cafe1212c8
322 546 acl: user fred not allowed on quux/file.py
323 547 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
324 548 transaction abort!
325 549 rollback completed
326 550 abort: acl: access denied for changeset 911600dab2ae
327 551 no rollback information available
328 552 0:6675d58eff77
329 553
330 554 fred is allowed inside foo/, but not foo/Bar/
331 555 Pushing as user fred
332 556 hgrc = """
333 557 [hooks]
334 558 pretxnchangegroup.acl = python:hgext.acl.hook
335 559 [acl]
336 560 sources = push
337 561 [acl.allow]
338 562 foo/** = fred
339 563 [acl.deny]
340 564 foo/bar/** = fred
341 565 foo/Bar/** = fred
342 566 """
343 567 pushing to ../b
344 568 searching for changes
345 569 common changesets up to 6675d58eff77
346 570 3 changesets found
347 571 list of changesets:
348 572 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
349 573 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
350 574 911600dab2ae7a9baff75958b84fe606851ce955
351 575 adding changesets
576 bundle changes: 0 chunks
577 bundle changes: 1 chunks
578 bundle changes: 2 chunks
579 bundle changes: 3 chunks
580 bundle changes: 4 chunks
581 bundle changes: 5 chunks
582 bundle changes: 6 chunks
583 bundle changes: 7 chunks
584 bundle changes: 8 chunks
585 bundle changes: 9 chunks
586 bundle manifests: 0 chunks
587 bundle manifests: 1 chunks
588 bundle manifests: 2 chunks
589 bundle manifests: 3 chunks
590 bundle manifests: 4 chunks
591 bundle manifests: 5 chunks
592 bundle manifests: 6 chunks
593 bundle manifests: 7 chunks
594 bundle manifests: 8 chunks
595 bundle manifests: 9 chunks
596 bundle files: foo/Bar/file.txt 0 chunks
597 bundle files: foo/Bar/file.txt 1 chunks
598 bundle files: foo/Bar/file.txt 2 chunks
599 bundle files: foo/Bar/file.txt 3 chunks
600 bundle files: foo/file.txt 4 chunks
601 bundle files: foo/file.txt 5 chunks
602 bundle files: foo/file.txt 6 chunks
603 bundle files: foo/file.txt 7 chunks
604 bundle files: quux/file.py 8 chunks
605 bundle files: quux/file.py 9 chunks
606 bundle files: quux/file.py 10 chunks
607 bundle files: quux/file.py 11 chunks
352 608 changesets: 1 chunks
353 609 add changeset ef1ea85a6374
354 610 changesets: 2 chunks
355 611 add changeset f9cafe1212c8
356 612 changesets: 3 chunks
357 613 add changeset 911600dab2ae
358 614 adding manifests
359 615 manifests: 1 chunks
360 616 manifests: 2 chunks
361 617 manifests: 3 chunks
362 618 adding file changes
363 619 adding foo/Bar/file.txt revisions
364 620 files: 1 chunks
365 621 adding foo/file.txt revisions
366 622 files: 2 chunks
367 623 adding quux/file.py revisions
368 624 files: 3 chunks
369 625 added 3 changesets with 3 changes to 3 files
370 626 calling hook pretxnchangegroup.acl: hgext.acl.hook
371 627 acl: acl.allow enabled, 1 entries for user fred
372 628 acl: acl.deny enabled, 2 entries for user fred
373 629 acl: allowing changeset ef1ea85a6374
374 630 acl: user fred denied on foo/Bar/file.txt
375 631 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
376 632 transaction abort!
377 633 rollback completed
378 634 abort: acl: access denied for changeset f9cafe1212c8
379 635 no rollback information available
380 636 0:6675d58eff77
381 637
382 638 barney is not mentioned => not allowed anywhere
383 639 Pushing as user barney
384 640 hgrc = """
385 641 [hooks]
386 642 pretxnchangegroup.acl = python:hgext.acl.hook
387 643 [acl]
388 644 sources = push
389 645 [acl.allow]
390 646 foo/** = fred
391 647 [acl.deny]
392 648 foo/bar/** = fred
393 649 foo/Bar/** = fred
394 650 """
395 651 pushing to ../b
396 652 searching for changes
397 653 common changesets up to 6675d58eff77
398 654 3 changesets found
399 655 list of changesets:
400 656 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
401 657 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
402 658 911600dab2ae7a9baff75958b84fe606851ce955
403 659 adding changesets
660 bundle changes: 0 chunks
661 bundle changes: 1 chunks
662 bundle changes: 2 chunks
663 bundle changes: 3 chunks
664 bundle changes: 4 chunks
665 bundle changes: 5 chunks
666 bundle changes: 6 chunks
667 bundle changes: 7 chunks
668 bundle changes: 8 chunks
669 bundle changes: 9 chunks
670 bundle manifests: 0 chunks
671 bundle manifests: 1 chunks
672 bundle manifests: 2 chunks
673 bundle manifests: 3 chunks
674 bundle manifests: 4 chunks
675 bundle manifests: 5 chunks
676 bundle manifests: 6 chunks
677 bundle manifests: 7 chunks
678 bundle manifests: 8 chunks
679 bundle manifests: 9 chunks
680 bundle files: foo/Bar/file.txt 0 chunks
681 bundle files: foo/Bar/file.txt 1 chunks
682 bundle files: foo/Bar/file.txt 2 chunks
683 bundle files: foo/Bar/file.txt 3 chunks
684 bundle files: foo/file.txt 4 chunks
685 bundle files: foo/file.txt 5 chunks
686 bundle files: foo/file.txt 6 chunks
687 bundle files: foo/file.txt 7 chunks
688 bundle files: quux/file.py 8 chunks
689 bundle files: quux/file.py 9 chunks
690 bundle files: quux/file.py 10 chunks
691 bundle files: quux/file.py 11 chunks
404 692 changesets: 1 chunks
405 693 add changeset ef1ea85a6374
406 694 changesets: 2 chunks
407 695 add changeset f9cafe1212c8
408 696 changesets: 3 chunks
409 697 add changeset 911600dab2ae
410 698 adding manifests
411 699 manifests: 1 chunks
412 700 manifests: 2 chunks
413 701 manifests: 3 chunks
414 702 adding file changes
415 703 adding foo/Bar/file.txt revisions
416 704 files: 1 chunks
417 705 adding foo/file.txt revisions
418 706 files: 2 chunks
419 707 adding quux/file.py revisions
420 708 files: 3 chunks
421 709 added 3 changesets with 3 changes to 3 files
422 710 calling hook pretxnchangegroup.acl: hgext.acl.hook
423 711 acl: acl.allow enabled, 0 entries for user barney
424 712 acl: acl.deny enabled, 0 entries for user barney
425 713 acl: user barney not allowed on foo/file.txt
426 714 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
427 715 transaction abort!
428 716 rollback completed
429 717 abort: acl: access denied for changeset ef1ea85a6374
430 718 no rollback information available
431 719 0:6675d58eff77
432 720
433 721 barney is allowed everywhere
434 722 Pushing as user barney
435 723 hgrc = """
436 724 [hooks]
437 725 pretxnchangegroup.acl = python:hgext.acl.hook
438 726 [acl]
439 727 sources = push
440 728 [acl.allow]
441 729 foo/** = fred
442 730 [acl.deny]
443 731 foo/bar/** = fred
444 732 foo/Bar/** = fred
445 733 [acl.allow]
446 734 ** = barney
447 735 """
448 736 pushing to ../b
449 737 searching for changes
450 738 common changesets up to 6675d58eff77
451 739 3 changesets found
452 740 list of changesets:
453 741 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
454 742 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
455 743 911600dab2ae7a9baff75958b84fe606851ce955
456 744 adding changesets
745 bundle changes: 0 chunks
746 bundle changes: 1 chunks
747 bundle changes: 2 chunks
748 bundle changes: 3 chunks
749 bundle changes: 4 chunks
750 bundle changes: 5 chunks
751 bundle changes: 6 chunks
752 bundle changes: 7 chunks
753 bundle changes: 8 chunks
754 bundle changes: 9 chunks
755 bundle manifests: 0 chunks
756 bundle manifests: 1 chunks
757 bundle manifests: 2 chunks
758 bundle manifests: 3 chunks
759 bundle manifests: 4 chunks
760 bundle manifests: 5 chunks
761 bundle manifests: 6 chunks
762 bundle manifests: 7 chunks
763 bundle manifests: 8 chunks
764 bundle manifests: 9 chunks
765 bundle files: foo/Bar/file.txt 0 chunks
766 bundle files: foo/Bar/file.txt 1 chunks
767 bundle files: foo/Bar/file.txt 2 chunks
768 bundle files: foo/Bar/file.txt 3 chunks
769 bundle files: foo/file.txt 4 chunks
770 bundle files: foo/file.txt 5 chunks
771 bundle files: foo/file.txt 6 chunks
772 bundle files: foo/file.txt 7 chunks
773 bundle files: quux/file.py 8 chunks
774 bundle files: quux/file.py 9 chunks
775 bundle files: quux/file.py 10 chunks
776 bundle files: quux/file.py 11 chunks
457 777 changesets: 1 chunks
458 778 add changeset ef1ea85a6374
459 779 changesets: 2 chunks
460 780 add changeset f9cafe1212c8
461 781 changesets: 3 chunks
462 782 add changeset 911600dab2ae
463 783 adding manifests
464 784 manifests: 1 chunks
465 785 manifests: 2 chunks
466 786 manifests: 3 chunks
467 787 adding file changes
468 788 adding foo/Bar/file.txt revisions
469 789 files: 1 chunks
470 790 adding foo/file.txt revisions
471 791 files: 2 chunks
472 792 adding quux/file.py revisions
473 793 files: 3 chunks
474 794 added 3 changesets with 3 changes to 3 files
475 795 calling hook pretxnchangegroup.acl: hgext.acl.hook
476 796 acl: acl.allow enabled, 1 entries for user barney
477 797 acl: acl.deny enabled, 0 entries for user barney
478 798 acl: allowing changeset ef1ea85a6374
479 799 acl: allowing changeset f9cafe1212c8
480 800 acl: allowing changeset 911600dab2ae
481 801 updating the branch cache
482 802 rolling back last transaction
483 803 0:6675d58eff77
484 804
485 805 wilma can change files with a .txt extension
486 806 Pushing as user wilma
487 807 hgrc = """
488 808 [hooks]
489 809 pretxnchangegroup.acl = python:hgext.acl.hook
490 810 [acl]
491 811 sources = push
492 812 [acl.allow]
493 813 foo/** = fred
494 814 [acl.deny]
495 815 foo/bar/** = fred
496 816 foo/Bar/** = fred
497 817 [acl.allow]
498 818 ** = barney
499 819 **/*.txt = wilma
500 820 """
501 821 pushing to ../b
502 822 searching for changes
503 823 common changesets up to 6675d58eff77
504 824 invalidating branch cache (tip differs)
505 825 3 changesets found
506 826 list of changesets:
507 827 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
508 828 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
509 829 911600dab2ae7a9baff75958b84fe606851ce955
510 830 adding changesets
831 bundle changes: 0 chunks
832 bundle changes: 1 chunks
833 bundle changes: 2 chunks
834 bundle changes: 3 chunks
835 bundle changes: 4 chunks
836 bundle changes: 5 chunks
837 bundle changes: 6 chunks
838 bundle changes: 7 chunks
839 bundle changes: 8 chunks
840 bundle changes: 9 chunks
841 bundle manifests: 0 chunks
842 bundle manifests: 1 chunks
843 bundle manifests: 2 chunks
844 bundle manifests: 3 chunks
845 bundle manifests: 4 chunks
846 bundle manifests: 5 chunks
847 bundle manifests: 6 chunks
848 bundle manifests: 7 chunks
849 bundle manifests: 8 chunks
850 bundle manifests: 9 chunks
851 bundle files: foo/Bar/file.txt 0 chunks
852 bundle files: foo/Bar/file.txt 1 chunks
853 bundle files: foo/Bar/file.txt 2 chunks
854 bundle files: foo/Bar/file.txt 3 chunks
855 bundle files: foo/file.txt 4 chunks
856 bundle files: foo/file.txt 5 chunks
857 bundle files: foo/file.txt 6 chunks
858 bundle files: foo/file.txt 7 chunks
859 bundle files: quux/file.py 8 chunks
860 bundle files: quux/file.py 9 chunks
861 bundle files: quux/file.py 10 chunks
862 bundle files: quux/file.py 11 chunks
511 863 changesets: 1 chunks
512 864 add changeset ef1ea85a6374
513 865 changesets: 2 chunks
514 866 add changeset f9cafe1212c8
515 867 changesets: 3 chunks
516 868 add changeset 911600dab2ae
517 869 adding manifests
518 870 manifests: 1 chunks
519 871 manifests: 2 chunks
520 872 manifests: 3 chunks
521 873 adding file changes
522 874 adding foo/Bar/file.txt revisions
523 875 files: 1 chunks
524 876 adding foo/file.txt revisions
525 877 files: 2 chunks
526 878 adding quux/file.py revisions
527 879 files: 3 chunks
528 880 added 3 changesets with 3 changes to 3 files
529 881 calling hook pretxnchangegroup.acl: hgext.acl.hook
530 882 acl: acl.allow enabled, 1 entries for user wilma
531 883 acl: acl.deny enabled, 0 entries for user wilma
532 884 acl: allowing changeset ef1ea85a6374
533 885 acl: allowing changeset f9cafe1212c8
534 886 acl: user wilma not allowed on quux/file.py
535 887 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
536 888 transaction abort!
537 889 rollback completed
538 890 abort: acl: access denied for changeset 911600dab2ae
539 891 no rollback information available
540 892 0:6675d58eff77
541 893
542 894 file specified by acl.config does not exist
543 895 Pushing as user barney
544 896 hgrc = """
545 897 [hooks]
546 898 pretxnchangegroup.acl = python:hgext.acl.hook
547 899 [acl]
548 900 sources = push
549 901 [acl.allow]
550 902 foo/** = fred
551 903 [acl.deny]
552 904 foo/bar/** = fred
553 905 foo/Bar/** = fred
554 906 [acl.allow]
555 907 ** = barney
556 908 **/*.txt = wilma
557 909 [acl]
558 910 config = ../acl.config
559 911 """
560 912 pushing to ../b
561 913 searching for changes
562 914 common changesets up to 6675d58eff77
563 915 3 changesets found
564 916 list of changesets:
565 917 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
566 918 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
567 919 911600dab2ae7a9baff75958b84fe606851ce955
568 920 adding changesets
921 bundle changes: 0 chunks
922 bundle changes: 1 chunks
923 bundle changes: 2 chunks
924 bundle changes: 3 chunks
925 bundle changes: 4 chunks
926 bundle changes: 5 chunks
927 bundle changes: 6 chunks
928 bundle changes: 7 chunks
929 bundle changes: 8 chunks
930 bundle changes: 9 chunks
931 bundle manifests: 0 chunks
932 bundle manifests: 1 chunks
933 bundle manifests: 2 chunks
934 bundle manifests: 3 chunks
935 bundle manifests: 4 chunks
936 bundle manifests: 5 chunks
937 bundle manifests: 6 chunks
938 bundle manifests: 7 chunks
939 bundle manifests: 8 chunks
940 bundle manifests: 9 chunks
941 bundle files: foo/Bar/file.txt 0 chunks
942 bundle files: foo/Bar/file.txt 1 chunks
943 bundle files: foo/Bar/file.txt 2 chunks
944 bundle files: foo/Bar/file.txt 3 chunks
945 bundle files: foo/file.txt 4 chunks
946 bundle files: foo/file.txt 5 chunks
947 bundle files: foo/file.txt 6 chunks
948 bundle files: foo/file.txt 7 chunks
949 bundle files: quux/file.py 8 chunks
950 bundle files: quux/file.py 9 chunks
951 bundle files: quux/file.py 10 chunks
952 bundle files: quux/file.py 11 chunks
569 953 changesets: 1 chunks
570 954 add changeset ef1ea85a6374
571 955 changesets: 2 chunks
572 956 add changeset f9cafe1212c8
573 957 changesets: 3 chunks
574 958 add changeset 911600dab2ae
575 959 adding manifests
576 960 manifests: 1 chunks
577 961 manifests: 2 chunks
578 962 manifests: 3 chunks
579 963 adding file changes
580 964 adding foo/Bar/file.txt revisions
581 965 files: 1 chunks
582 966 adding foo/file.txt revisions
583 967 files: 2 chunks
584 968 adding quux/file.py revisions
585 969 files: 3 chunks
586 970 added 3 changesets with 3 changes to 3 files
587 971 calling hook pretxnchangegroup.acl: hgext.acl.hook
588 972 error: pretxnchangegroup.acl hook raised an exception: [Errno 2] No such file or directory: '../acl.config'
589 973 transaction abort!
590 974 rollback completed
591 975 abort: No such file or directory: ../acl.config
592 976 no rollback information available
593 977 0:6675d58eff77
594 978
595 979 betty is allowed inside foo/ by a acl.config file
596 980 Pushing as user betty
597 981 hgrc = """
598 982 [hooks]
599 983 pretxnchangegroup.acl = python:hgext.acl.hook
600 984 [acl]
601 985 sources = push
602 986 [acl.allow]
603 987 foo/** = fred
604 988 [acl.deny]
605 989 foo/bar/** = fred
606 990 foo/Bar/** = fred
607 991 [acl.allow]
608 992 ** = barney
609 993 **/*.txt = wilma
610 994 [acl]
611 995 config = ../acl.config
612 996 """
613 997 acl.config = """
614 998 [acl.allow]
615 999 foo/** = betty
616 1000 """
617 1001 pushing to ../b
618 1002 searching for changes
619 1003 common changesets up to 6675d58eff77
620 1004 3 changesets found
621 1005 list of changesets:
622 1006 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
623 1007 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
624 1008 911600dab2ae7a9baff75958b84fe606851ce955
625 1009 adding changesets
1010 bundle changes: 0 chunks
1011 bundle changes: 1 chunks
1012 bundle changes: 2 chunks
1013 bundle changes: 3 chunks
1014 bundle changes: 4 chunks
1015 bundle changes: 5 chunks
1016 bundle changes: 6 chunks
1017 bundle changes: 7 chunks
1018 bundle changes: 8 chunks
1019 bundle changes: 9 chunks
1020 bundle manifests: 0 chunks
1021 bundle manifests: 1 chunks
1022 bundle manifests: 2 chunks
1023 bundle manifests: 3 chunks
1024 bundle manifests: 4 chunks
1025 bundle manifests: 5 chunks
1026 bundle manifests: 6 chunks
1027 bundle manifests: 7 chunks
1028 bundle manifests: 8 chunks
1029 bundle manifests: 9 chunks
1030 bundle files: foo/Bar/file.txt 0 chunks
1031 bundle files: foo/Bar/file.txt 1 chunks
1032 bundle files: foo/Bar/file.txt 2 chunks
1033 bundle files: foo/Bar/file.txt 3 chunks
1034 bundle files: foo/file.txt 4 chunks
1035 bundle files: foo/file.txt 5 chunks
1036 bundle files: foo/file.txt 6 chunks
1037 bundle files: foo/file.txt 7 chunks
1038 bundle files: quux/file.py 8 chunks
1039 bundle files: quux/file.py 9 chunks
1040 bundle files: quux/file.py 10 chunks
1041 bundle files: quux/file.py 11 chunks
626 1042 changesets: 1 chunks
627 1043 add changeset ef1ea85a6374
628 1044 changesets: 2 chunks
629 1045 add changeset f9cafe1212c8
630 1046 changesets: 3 chunks
631 1047 add changeset 911600dab2ae
632 1048 adding manifests
633 1049 manifests: 1 chunks
634 1050 manifests: 2 chunks
635 1051 manifests: 3 chunks
636 1052 adding file changes
637 1053 adding foo/Bar/file.txt revisions
638 1054 files: 1 chunks
639 1055 adding foo/file.txt revisions
640 1056 files: 2 chunks
641 1057 adding quux/file.py revisions
642 1058 files: 3 chunks
643 1059 added 3 changesets with 3 changes to 3 files
644 1060 calling hook pretxnchangegroup.acl: hgext.acl.hook
645 1061 acl: acl.allow enabled, 1 entries for user betty
646 1062 acl: acl.deny enabled, 0 entries for user betty
647 1063 acl: allowing changeset ef1ea85a6374
648 1064 acl: allowing changeset f9cafe1212c8
649 1065 acl: user betty not allowed on quux/file.py
650 1066 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
651 1067 transaction abort!
652 1068 rollback completed
653 1069 abort: acl: access denied for changeset 911600dab2ae
654 1070 no rollback information available
655 1071 0:6675d58eff77
656 1072
657 1073 acl.config can set only [acl.allow]/[acl.deny]
658 1074 Pushing as user barney
659 1075 hgrc = """
660 1076 [hooks]
661 1077 pretxnchangegroup.acl = python:hgext.acl.hook
662 1078 [acl]
663 1079 sources = push
664 1080 [acl.allow]
665 1081 foo/** = fred
666 1082 [acl.deny]
667 1083 foo/bar/** = fred
668 1084 foo/Bar/** = fred
669 1085 [acl.allow]
670 1086 ** = barney
671 1087 **/*.txt = wilma
672 1088 [acl]
673 1089 config = ../acl.config
674 1090 """
675 1091 acl.config = """
676 1092 [acl.allow]
677 1093 foo/** = betty
678 1094 [hooks]
679 1095 changegroup.acl = false
680 1096 """
681 1097 pushing to ../b
682 1098 searching for changes
683 1099 common changesets up to 6675d58eff77
684 1100 3 changesets found
685 1101 list of changesets:
686 1102 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
687 1103 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
688 1104 911600dab2ae7a9baff75958b84fe606851ce955
689 1105 adding changesets
1106 bundle changes: 0 chunks
1107 bundle changes: 1 chunks
1108 bundle changes: 2 chunks
1109 bundle changes: 3 chunks
1110 bundle changes: 4 chunks
1111 bundle changes: 5 chunks
1112 bundle changes: 6 chunks
1113 bundle changes: 7 chunks
1114 bundle changes: 8 chunks
1115 bundle changes: 9 chunks
1116 bundle manifests: 0 chunks
1117 bundle manifests: 1 chunks
1118 bundle manifests: 2 chunks
1119 bundle manifests: 3 chunks
1120 bundle manifests: 4 chunks
1121 bundle manifests: 5 chunks
1122 bundle manifests: 6 chunks
1123 bundle manifests: 7 chunks
1124 bundle manifests: 8 chunks
1125 bundle manifests: 9 chunks
1126 bundle files: foo/Bar/file.txt 0 chunks
1127 bundle files: foo/Bar/file.txt 1 chunks
1128 bundle files: foo/Bar/file.txt 2 chunks
1129 bundle files: foo/Bar/file.txt 3 chunks
1130 bundle files: foo/file.txt 4 chunks
1131 bundle files: foo/file.txt 5 chunks
1132 bundle files: foo/file.txt 6 chunks
1133 bundle files: foo/file.txt 7 chunks
1134 bundle files: quux/file.py 8 chunks
1135 bundle files: quux/file.py 9 chunks
1136 bundle files: quux/file.py 10 chunks
1137 bundle files: quux/file.py 11 chunks
690 1138 changesets: 1 chunks
691 1139 add changeset ef1ea85a6374
692 1140 changesets: 2 chunks
693 1141 add changeset f9cafe1212c8
694 1142 changesets: 3 chunks
695 1143 add changeset 911600dab2ae
696 1144 adding manifests
697 1145 manifests: 1 chunks
698 1146 manifests: 2 chunks
699 1147 manifests: 3 chunks
700 1148 adding file changes
701 1149 adding foo/Bar/file.txt revisions
702 1150 files: 1 chunks
703 1151 adding foo/file.txt revisions
704 1152 files: 2 chunks
705 1153 adding quux/file.py revisions
706 1154 files: 3 chunks
707 1155 added 3 changesets with 3 changes to 3 files
708 1156 calling hook pretxnchangegroup.acl: hgext.acl.hook
709 1157 acl: acl.allow enabled, 1 entries for user barney
710 1158 acl: acl.deny enabled, 0 entries for user barney
711 1159 acl: allowing changeset ef1ea85a6374
712 1160 acl: allowing changeset f9cafe1212c8
713 1161 acl: allowing changeset 911600dab2ae
714 1162 updating the branch cache
715 1163 rolling back last transaction
716 1164 0:6675d58eff77
717 1165
@@ -1,343 +1,365
1 1 ====== Setting up test
2 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
3 3 created new head
4 4 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
5 5 checking changesets
6 6 checking manifests
7 7 crosschecking files in changesets and manifests
8 8 checking files
9 9 4 files, 9 changesets, 7 total revisions
10 10 ====== Bundle --all
11 11 9 changesets found
12 12 ====== Bundle test to full.hg
13 13 searching for changes
14 14 9 changesets found
15 15 ====== Unbundle full.hg in test
16 16 adding changesets
17 17 adding manifests
18 18 adding file changes
19 19 added 0 changesets with 0 changes to 4 files
20 20 (run 'hg update' to get a working copy)
21 21 ====== Verify empty
22 22 checking changesets
23 23 checking manifests
24 24 crosschecking files in changesets and manifests
25 25 checking files
26 26 0 files, 0 changesets, 0 total revisions
27 27 ====== Pull full.hg into test (using --cwd)
28 28 pulling from ../full.hg
29 29 searching for changes
30 30 no changes found
31 31 ====== Pull full.hg into empty (using --cwd)
32 32 pulling from ../full.hg
33 33 requesting all changes
34 34 adding changesets
35 35 adding manifests
36 36 adding file changes
37 37 added 9 changesets with 7 changes to 4 files (+1 heads)
38 38 (run 'hg heads' to see heads, 'hg merge' to merge)
39 39 ====== Rollback empty
40 40 rolling back last transaction
41 41 ====== Pull full.hg into empty again (using --cwd)
42 42 pulling from ../full.hg
43 43 requesting all changes
44 44 adding changesets
45 45 adding manifests
46 46 adding file changes
47 47 added 9 changesets with 7 changes to 4 files (+1 heads)
48 48 (run 'hg heads' to see heads, 'hg merge' to merge)
49 49 ====== Pull full.hg into test (using -R)
50 50 pulling from full.hg
51 51 searching for changes
52 52 no changes found
53 53 ====== Pull full.hg into empty (using -R)
54 54 pulling from full.hg
55 55 searching for changes
56 56 no changes found
57 57 ====== Rollback empty
58 58 rolling back last transaction
59 59 ====== Pull full.hg into empty again (using -R)
60 60 pulling from full.hg
61 61 requesting all changes
62 62 adding changesets
63 63 adding manifests
64 64 adding file changes
65 65 added 9 changesets with 7 changes to 4 files (+1 heads)
66 66 (run 'hg heads' to see heads, 'hg merge' to merge)
67 67 ====== Log -R full.hg in fresh empty
68 68 changeset: 8:836ac62537ab
69 69 tag: tip
70 70 parent: 3:ac69c658229d
71 71 user: test
72 72 date: Mon Jan 12 13:46:40 1970 +0000
73 73 summary: 0.3m
74 74
75 75 changeset: 7:80fe151401c2
76 76 user: test
77 77 date: Mon Jan 12 13:46:40 1970 +0000
78 78 summary: 1.3m
79 79
80 80 changeset: 6:1e3f6b843bd6
81 81 user: test
82 82 date: Mon Jan 12 13:46:40 1970 +0000
83 83 summary: 1.3
84 84
85 85 changeset: 5:024e4e7df376
86 86 user: test
87 87 date: Mon Jan 12 13:46:40 1970 +0000
88 88 summary: 1.2
89 89
90 90 changeset: 4:5f4f3ceb285e
91 91 parent: 0:5649c9d34dd8
92 92 user: test
93 93 date: Mon Jan 12 13:46:40 1970 +0000
94 94 summary: 1.1
95 95
96 96 changeset: 3:ac69c658229d
97 97 user: test
98 98 date: Mon Jan 12 13:46:40 1970 +0000
99 99 summary: 0.3
100 100
101 101 changeset: 2:d62976ca1e50
102 102 user: test
103 103 date: Mon Jan 12 13:46:40 1970 +0000
104 104 summary: 0.2
105 105
106 106 changeset: 1:10b2180f755b
107 107 user: test
108 108 date: Mon Jan 12 13:46:40 1970 +0000
109 109 summary: 0.1
110 110
111 111 changeset: 0:5649c9d34dd8
112 112 user: test
113 113 date: Mon Jan 12 13:46:40 1970 +0000
114 114 summary: 0.0
115 115
116 116 ====== Pull ../full.hg into empty (with hook)
117 117 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:../full.hg
118 118 pulling from bundle://../full.hg
119 119 requesting all changes
120 120 adding changesets
121 121 adding manifests
122 122 adding file changes
123 123 added 9 changesets with 7 changes to 4 files (+1 heads)
124 124 (run 'hg heads' to see heads, 'hg merge' to merge)
125 125 ====== Rollback empty
126 126 rolling back last transaction
127 127 ====== Log -R bundle:empty+full.hg
128 128 8 7 6 5 4 3 2 1 0
129 129 ====== Pull full.hg into empty again (using -R; with hook)
130 130 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:empty+full.hg
131 131 pulling from full.hg
132 132 requesting all changes
133 133 adding changesets
134 134 adding manifests
135 135 adding file changes
136 136 added 9 changesets with 7 changes to 4 files (+1 heads)
137 137 (run 'hg heads' to see heads, 'hg merge' to merge)
138 138 ====== Create partial clones
139 139 requesting all changes
140 140 adding changesets
141 141 adding manifests
142 142 adding file changes
143 143 added 4 changesets with 4 changes to 1 files
144 144 updating to branch default
145 145 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
146 146 updating to branch default
147 147 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
148 148 ====== Log -R full.hg in partial
149 149 changeset: 8:836ac62537ab
150 150 tag: tip
151 151 parent: 3:ac69c658229d
152 152 user: test
153 153 date: Mon Jan 12 13:46:40 1970 +0000
154 154 summary: 0.3m
155 155
156 156 changeset: 7:80fe151401c2
157 157 user: test
158 158 date: Mon Jan 12 13:46:40 1970 +0000
159 159 summary: 1.3m
160 160
161 161 changeset: 6:1e3f6b843bd6
162 162 user: test
163 163 date: Mon Jan 12 13:46:40 1970 +0000
164 164 summary: 1.3
165 165
166 166 changeset: 5:024e4e7df376
167 167 user: test
168 168 date: Mon Jan 12 13:46:40 1970 +0000
169 169 summary: 1.2
170 170
171 171 changeset: 4:5f4f3ceb285e
172 172 parent: 0:5649c9d34dd8
173 173 user: test
174 174 date: Mon Jan 12 13:46:40 1970 +0000
175 175 summary: 1.1
176 176
177 177 changeset: 3:ac69c658229d
178 178 user: test
179 179 date: Mon Jan 12 13:46:40 1970 +0000
180 180 summary: 0.3
181 181
182 182 changeset: 2:d62976ca1e50
183 183 user: test
184 184 date: Mon Jan 12 13:46:40 1970 +0000
185 185 summary: 0.2
186 186
187 187 changeset: 1:10b2180f755b
188 188 user: test
189 189 date: Mon Jan 12 13:46:40 1970 +0000
190 190 summary: 0.1
191 191
192 192 changeset: 0:5649c9d34dd8
193 193 user: test
194 194 date: Mon Jan 12 13:46:40 1970 +0000
195 195 summary: 0.0
196 196
197 197 ====== Incoming full.hg in partial
198 198 comparing with bundle://../full.hg
199 199 searching for changes
200 200 changeset: 4:5f4f3ceb285e
201 201 parent: 0:5649c9d34dd8
202 202 user: test
203 203 date: Mon Jan 12 13:46:40 1970 +0000
204 204 summary: 1.1
205 205
206 206 changeset: 5:024e4e7df376
207 207 user: test
208 208 date: Mon Jan 12 13:46:40 1970 +0000
209 209 summary: 1.2
210 210
211 211 changeset: 6:1e3f6b843bd6
212 212 user: test
213 213 date: Mon Jan 12 13:46:40 1970 +0000
214 214 summary: 1.3
215 215
216 216 changeset: 7:80fe151401c2
217 217 user: test
218 218 date: Mon Jan 12 13:46:40 1970 +0000
219 219 summary: 1.3m
220 220
221 221 changeset: 8:836ac62537ab
222 222 tag: tip
223 223 parent: 3:ac69c658229d
224 224 user: test
225 225 date: Mon Jan 12 13:46:40 1970 +0000
226 226 summary: 0.3m
227 227
228 228 ====== Outgoing -R full.hg vs partial2 in partial
229 229 comparing with ../partial2
230 230 searching for changes
231 231 changeset: 4:5f4f3ceb285e
232 232 parent: 0:5649c9d34dd8
233 233 user: test
234 234 date: Mon Jan 12 13:46:40 1970 +0000
235 235 summary: 1.1
236 236
237 237 changeset: 5:024e4e7df376
238 238 user: test
239 239 date: Mon Jan 12 13:46:40 1970 +0000
240 240 summary: 1.2
241 241
242 242 changeset: 6:1e3f6b843bd6
243 243 user: test
244 244 date: Mon Jan 12 13:46:40 1970 +0000
245 245 summary: 1.3
246 246
247 247 changeset: 7:80fe151401c2
248 248 user: test
249 249 date: Mon Jan 12 13:46:40 1970 +0000
250 250 summary: 1.3m
251 251
252 252 changeset: 8:836ac62537ab
253 253 tag: tip
254 254 parent: 3:ac69c658229d
255 255 user: test
256 256 date: Mon Jan 12 13:46:40 1970 +0000
257 257 summary: 0.3m
258 258
259 259 ====== Outgoing -R does-not-exist.hg vs partial2 in partial
260 260 abort: No such file or directory: ../does-not-exist.hg
261 261 ====== Direct clone from bundle (all-history)
262 262 requesting all changes
263 263 adding changesets
264 264 adding manifests
265 265 adding file changes
266 266 added 9 changesets with 7 changes to 4 files (+1 heads)
267 267 updating to branch default
268 268 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
269 269 changeset: 8:836ac62537ab
270 270 tag: tip
271 271 parent: 3:ac69c658229d
272 272 user: test
273 273 date: Mon Jan 12 13:46:40 1970 +0000
274 274 summary: 0.3m
275 275
276 276 changeset: 7:80fe151401c2
277 277 user: test
278 278 date: Mon Jan 12 13:46:40 1970 +0000
279 279 summary: 1.3m
280 280
281 281 ====== Unbundle incremental bundles into fresh empty in one go
282 282 1 changesets found
283 283 1 changesets found
284 284 adding changesets
285 285 adding manifests
286 286 adding file changes
287 287 added 1 changesets with 1 changes to 1 files
288 288 adding changesets
289 289 adding manifests
290 290 adding file changes
291 291 added 1 changesets with 1 changes to 1 files
292 292 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
293 293 ====== test for 540d1059c802
294 294 updating to branch default
295 295 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
296 296 searching for changes
297 297 1 changesets found
298 298 comparing with ../bundle.hg
299 299 searching for changes
300 300 changeset: 2:ed1b79f46b9a
301 301 tag: tip
302 302 parent: 0:bbd179dfa0a7
303 303 user: test
304 304 date: Thu Jan 01 00:00:00 1970 +0000
305 305 summary: change foo
306 306
307 307 ===== test that verify bundle does not traceback
308 308 abort: 00changelog.i@bbd179dfa0a7: unknown parent!
309 309 abort: cannot verify bundle or remote repos
310 310 checking changesets
311 311 checking manifests
312 312 crosschecking files in changesets and manifests
313 313 checking files
314 314 2 files, 2 changesets, 2 total revisions
315 315 ====== diff against bundle
316 316 diff -r 836ac62537ab anotherfile
317 317 --- a/anotherfile Mon Jan 12 13:46:40 1970 +0000
318 318 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
319 319 @@ -1,4 +0,0 @@
320 320 -0
321 321 -1
322 322 -2
323 323 -3
324 324 ====== bundle single branch
325 325 adding a
326 326 adding b
327 327 adding b1
328 328 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
329 329 adding c
330 330 created new head
331 331 adding c1
332 332 == bundling via incoming
333 333 comparing with .
334 334 searching for changes
335 335 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
336 336 5ece8e77363e2b5269e27c66828b72da29e4341a
337 337 == bundling
338 338 searching for changes
339 339 common changesets up to c0025332f9ed
340 340 2 changesets found
341 341 list of changesets:
342 342 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
343 343 5ece8e77363e2b5269e27c66828b72da29e4341a
344 bundle changes: 0 chunks
345 bundle changes: 1 chunks
346 bundle changes: 2 chunks
347 bundle changes: 3 chunks
348 bundle changes: 4 chunks
349 bundle changes: 5 chunks
350 bundle changes: 6 chunks
351 bundle manifests: 0 chunks
352 bundle manifests: 1 chunks
353 bundle manifests: 2 chunks
354 bundle manifests: 3 chunks
355 bundle manifests: 4 chunks
356 bundle manifests: 5 chunks
357 bundle manifests: 6 chunks
358 bundle files: b 0 chunks
359 bundle files: b 1 chunks
360 bundle files: b 2 chunks
361 bundle files: b 3 chunks
362 bundle files: b1 4 chunks
363 bundle files: b1 5 chunks
364 bundle files: b1 6 chunks
365 bundle files: b1 7 chunks
General Comments 0
You need to be logged in to leave comments. Login now