pushkey: gracefully handle prepushkey hook failure (issue4455)...
Pierre-Yves David
r23416:53a65929 stable
@@ -1,1804 +1,1810 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from node import hex, nullid, short
8 8 from i18n import _
9 9 import urllib
10 10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 12 import lock as lockmod
13 13 import transaction, store, encoding, exchange, bundle2
14 14 import scmutil, util, extensions, hook, error, revset
15 15 import match as matchmod
16 16 import merge as mergemod
17 17 import tags as tagsmod
18 18 from lock import release
19 19 import weakref, errno, os, time, inspect
20 20 import branchmap, pathutil
21 21 propertycache = util.propertycache
22 22 filecache = scmutil.filecache
23 23
24 24 class repofilecache(filecache):
25 25 """All filecache usage on repo are done for logic that should be unfiltered
26 26 """
27 27
28 28 def __get__(self, repo, type=None):
29 29 return super(repofilecache, self).__get__(repo.unfiltered(), type)
30 30 def __set__(self, repo, value):
31 31 return super(repofilecache, self).__set__(repo.unfiltered(), value)
32 32 def __delete__(self, repo):
33 33 return super(repofilecache, self).__delete__(repo.unfiltered())
34 34
35 35 class storecache(repofilecache):
36 36 """filecache for files in the store"""
37 37 def join(self, obj, fname):
38 38 return obj.sjoin(fname)
39 39
40 40 class unfilteredpropertycache(propertycache):
41 41 """propertycache that apply to unfiltered repo only"""
42 42
43 43 def __get__(self, repo, type=None):
44 44 unfi = repo.unfiltered()
45 45 if unfi is repo:
46 46 return super(unfilteredpropertycache, self).__get__(unfi)
47 47 return getattr(unfi, self.name)
48 48
49 49 class filteredpropertycache(propertycache):
50 50 """propertycache that must take filtering in account"""
51 51
52 52 def cachevalue(self, obj, value):
53 53 object.__setattr__(obj, self.name, value)
54 54
55 55
56 56 def hasunfilteredcache(repo, name):
57 57 """check if a repo has an unfilteredpropertycache value for <name>"""
58 58 return name in vars(repo.unfiltered())
59 59
60 60 def unfilteredmethod(orig):
61 61 """decorate method that always need to be run on unfiltered version"""
62 62 def wrapper(repo, *args, **kwargs):
63 63 return orig(repo.unfiltered(), *args, **kwargs)
64 64 return wrapper
65 65
66 66 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
67 67 'unbundle'))
68 68 legacycaps = moderncaps.union(set(['changegroupsubset']))
69 69
70 70 class localpeer(peer.peerrepository):
71 71 '''peer for a local repo; reflects only the most recent API'''
72 72
73 73 def __init__(self, repo, caps=moderncaps):
74 74 peer.peerrepository.__init__(self)
75 75 self._repo = repo.filtered('served')
76 76 self.ui = repo.ui
77 77 self._caps = repo._restrictcapabilities(caps)
78 78 self.requirements = repo.requirements
79 79 self.supportedformats = repo.supportedformats
80 80
81 81 def close(self):
82 82 self._repo.close()
83 83
84 84 def _capabilities(self):
85 85 return self._caps
86 86
87 87 def local(self):
88 88 return self._repo
89 89
90 90 def canpush(self):
91 91 return True
92 92
93 93 def url(self):
94 94 return self._repo.url()
95 95
96 96 def lookup(self, key):
97 97 return self._repo.lookup(key)
98 98
99 99 def branchmap(self):
100 100 return self._repo.branchmap()
101 101
102 102 def heads(self):
103 103 return self._repo.heads()
104 104
105 105 def known(self, nodes):
106 106 return self._repo.known(nodes)
107 107
108 108 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
109 109 format='HG10', **kwargs):
110 110 cg = exchange.getbundle(self._repo, source, heads=heads,
111 111 common=common, bundlecaps=bundlecaps, **kwargs)
112 112 if bundlecaps is not None and 'HG2Y' in bundlecaps:
113 113 # When requesting a bundle2, getbundle returns a stream to make the
114 114 # wire level function happier. We need to build a proper object
115 115 # from it in local peer.
116 116 cg = bundle2.unbundle20(self.ui, cg)
117 117 return cg
118 118
119 119 # TODO We might want to move the next two calls into legacypeer and add
120 120 # unbundle instead.
121 121
122 122 def unbundle(self, cg, heads, url):
123 123 """apply a bundle on a repo
124 124
125 125 This function handles the repo locking itself."""
126 126 try:
127 127 cg = exchange.readbundle(self.ui, cg, None)
128 128 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
129 129 if util.safehasattr(ret, 'getchunks'):
130 130 # This is a bundle20 object, turn it into an unbundler.
131 131 # This little dance should be dropped eventually when the API
132 132 # is finally improved.
133 133 stream = util.chunkbuffer(ret.getchunks())
134 134 ret = bundle2.unbundle20(self.ui, stream)
135 135 return ret
136 136 except error.PushRaced, exc:
137 137 raise error.ResponseError(_('push failed:'), str(exc))
138 138
139 139 def lock(self):
140 140 return self._repo.lock()
141 141
142 142 def addchangegroup(self, cg, source, url):
143 143 return changegroup.addchangegroup(self._repo, cg, source, url)
144 144
145 145 def pushkey(self, namespace, key, old, new):
146 146 return self._repo.pushkey(namespace, key, old, new)
147 147
148 148 def listkeys(self, namespace):
149 149 return self._repo.listkeys(namespace)
150 150
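# Editor's note: an illustrative sketch, not part of localrepo.py. pushkey()
# and listkeys() above simply forward to the underlying repository, which is
# where the prepushkey hook named in this changeset runs. Assuming an hgrc
# entry such as "[hooks] prepushkey = python:myhooks.rejectnewbookmarks"
# (hypothetical names), a minimal in-process hook might look like:
#
#     def rejectnewbookmarks(ui, repo, namespace=None, key=None,
#                            old=None, new=None, **kwargs):
#         # reject bookmarks that do not already exist on the server;
#         # a pre-hook fails by returning a true value
#         if namespace == 'bookmarks' and not old:
#             ui.warn('prepushkey hook: new bookmark %s rejected\n' % key)
#             return True
#         return False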
151 151 def debugwireargs(self, one, two, three=None, four=None, five=None):
152 152 '''used to test argument passing over the wire'''
153 153 return "%s %s %s %s %s" % (one, two, three, four, five)
154 154
155 155 class locallegacypeer(localpeer):
156 156 '''peer extension which implements legacy methods too; used for tests with
157 157 restricted capabilities'''
158 158
159 159 def __init__(self, repo):
160 160 localpeer.__init__(self, repo, caps=legacycaps)
161 161
162 162 def branches(self, nodes):
163 163 return self._repo.branches(nodes)
164 164
165 165 def between(self, pairs):
166 166 return self._repo.between(pairs)
167 167
168 168 def changegroup(self, basenodes, source):
169 169 return changegroup.changegroup(self._repo, basenodes, source)
170 170
171 171 def changegroupsubset(self, bases, heads, source):
172 172 return changegroup.changegroupsubset(self._repo, bases, heads, source)
173 173
174 174 class localrepository(object):
175 175
176 176 supportedformats = set(('revlogv1', 'generaldelta'))
177 177 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
178 178 'dotencode'))
179 179 openerreqs = set(('revlogv1', 'generaldelta'))
180 180 requirements = ['revlogv1']
181 181 filtername = None
182 182
183 183 # a list of (ui, featureset) functions.
184 184 # only functions defined in module of enabled extensions are invoked
185 185 featuresetupfuncs = set()
186 186
187 187 def _baserequirements(self, create):
188 188 return self.requirements[:]
189 189
190 190 def __init__(self, baseui, path=None, create=False):
191 191 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
192 192 self.wopener = self.wvfs
193 193 self.root = self.wvfs.base
194 194 self.path = self.wvfs.join(".hg")
195 195 self.origroot = path
196 196 self.auditor = pathutil.pathauditor(self.root, self._checknested)
197 197 self.vfs = scmutil.vfs(self.path)
198 198 self.opener = self.vfs
199 199 self.baseui = baseui
200 200 self.ui = baseui.copy()
201 201 self.ui.copy = baseui.copy # prevent copying repo configuration
202 202 # A list of callbacks to shape the phase if no data were found.
203 203 # Callbacks are in the form: func(repo, roots) --> processed root.
204 204 # This list is to be filled by extensions during repo setup.
205 205 self._phasedefaults = []
206 206 try:
207 207 self.ui.readconfig(self.join("hgrc"), self.root)
208 208 extensions.loadall(self.ui)
209 209 except IOError:
210 210 pass
211 211
212 212 if self.featuresetupfuncs:
213 213 self.supported = set(self._basesupported) # use private copy
214 214 extmods = set(m.__name__ for n, m
215 215 in extensions.extensions(self.ui))
216 216 for setupfunc in self.featuresetupfuncs:
217 217 if setupfunc.__module__ in extmods:
218 218 setupfunc(self.ui, self.supported)
219 219 else:
220 220 self.supported = self._basesupported
221 221
222 222 if not self.vfs.isdir():
223 223 if create:
224 224 if not self.wvfs.exists():
225 225 self.wvfs.makedirs()
226 226 self.vfs.makedir(notindexed=True)
227 227 requirements = self._baserequirements(create)
228 228 if self.ui.configbool('format', 'usestore', True):
229 229 self.vfs.mkdir("store")
230 230 requirements.append("store")
231 231 if self.ui.configbool('format', 'usefncache', True):
232 232 requirements.append("fncache")
233 233 if self.ui.configbool('format', 'dotencode', True):
234 234 requirements.append('dotencode')
235 235 # create an invalid changelog
236 236 self.vfs.append(
237 237 "00changelog.i",
238 238 '\0\0\0\2' # represents revlogv2
239 239 ' dummy changelog to prevent using the old repo layout'
240 240 )
241 241 if self.ui.configbool('format', 'generaldelta', False):
242 242 requirements.append("generaldelta")
243 243 requirements = set(requirements)
244 244 else:
245 245 raise error.RepoError(_("repository %s not found") % path)
246 246 elif create:
247 247 raise error.RepoError(_("repository %s already exists") % path)
248 248 else:
249 249 try:
250 250 requirements = scmutil.readrequires(self.vfs, self.supported)
251 251 except IOError, inst:
252 252 if inst.errno != errno.ENOENT:
253 253 raise
254 254 requirements = set()
255 255
256 256 self.sharedpath = self.path
257 257 try:
258 258 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
259 259 realpath=True)
260 260 s = vfs.base
261 261 if not vfs.exists():
262 262 raise error.RepoError(
263 263 _('.hg/sharedpath points to nonexistent directory %s') % s)
264 264 self.sharedpath = s
265 265 except IOError, inst:
266 266 if inst.errno != errno.ENOENT:
267 267 raise
268 268
269 269 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
270 270 self.spath = self.store.path
271 271 self.svfs = self.store.vfs
272 272 self.sopener = self.svfs
273 273 self.sjoin = self.store.join
274 274 self.vfs.createmode = self.store.createmode
275 275 self._applyrequirements(requirements)
276 276 if create:
277 277 self._writerequirements()
278 278
279 279
280 280 self._branchcaches = {}
281 281 self.filterpats = {}
282 282 self._datafilters = {}
283 283 self._transref = self._lockref = self._wlockref = None
284 284
285 285 # A cache for various files under .hg/ that tracks file changes,
286 286 # (used by the filecache decorator)
287 287 #
288 288 # Maps a property name to its util.filecacheentry
289 289 self._filecache = {}
290 290
291 291 # hold sets of revisions to be filtered
292 292 # should be cleared when something might have changed the filter value:
293 293 # - new changesets,
294 294 # - phase change,
295 295 # - new obsolescence marker,
296 296 # - working directory parent change,
297 297 # - bookmark changes
298 298 self.filteredrevcache = {}
299 299
300 300 def close(self):
301 301 pass
302 302
303 303 def _restrictcapabilities(self, caps):
304 304 # bundle2 is not ready for prime time, drop it unless explicitly
305 305 # required by the tests (or some brave tester)
306 306 if self.ui.configbool('experimental', 'bundle2-exp', False):
307 307 caps = set(caps)
308 308 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
309 309 caps.add('bundle2-exp=' + urllib.quote(capsblob))
310 310 return caps
311 311
312 312 def _applyrequirements(self, requirements):
313 313 self.requirements = requirements
314 314 self.sopener.options = dict((r, 1) for r in requirements
315 315 if r in self.openerreqs)
316 316 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
317 317 if chunkcachesize is not None:
318 318 self.sopener.options['chunkcachesize'] = chunkcachesize
319 319
320 320 def _writerequirements(self):
321 321 reqfile = self.opener("requires", "w")
322 322 for r in sorted(self.requirements):
323 323 reqfile.write("%s\n" % r)
324 324 reqfile.close()
325 325
326 326 def _checknested(self, path):
327 327 """Determine if path is a legal nested repository."""
328 328 if not path.startswith(self.root):
329 329 return False
330 330 subpath = path[len(self.root) + 1:]
331 331 normsubpath = util.pconvert(subpath)
332 332
333 333 # XXX: Checking against the current working copy is wrong in
334 334 # the sense that it can reject things like
335 335 #
336 336 # $ hg cat -r 10 sub/x.txt
337 337 #
338 338 # if sub/ is no longer a subrepository in the working copy
339 339 # parent revision.
340 340 #
341 341 # However, it can of course also allow things that would have
342 342 # been rejected before, such as the above cat command if sub/
343 343 # is a subrepository now, but was a normal directory before.
344 344 # The old path auditor would have rejected by mistake since it
345 345 # panics when it sees sub/.hg/.
346 346 #
347 347 # All in all, checking against the working copy seems sensible
348 348 # since we want to prevent access to nested repositories on
349 349 # the filesystem *now*.
350 350 ctx = self[None]
351 351 parts = util.splitpath(subpath)
352 352 while parts:
353 353 prefix = '/'.join(parts)
354 354 if prefix in ctx.substate:
355 355 if prefix == normsubpath:
356 356 return True
357 357 else:
358 358 sub = ctx.sub(prefix)
359 359 return sub.checknested(subpath[len(prefix) + 1:])
360 360 else:
361 361 parts.pop()
362 362 return False
363 363
364 364 def peer(self):
365 365 return localpeer(self) # not cached to avoid reference cycle
366 366
367 367 def unfiltered(self):
368 368 """Return unfiltered version of the repository
369 369
370 370 Intended to be overwritten by filtered repo."""
371 371 return self
372 372
373 373 def filtered(self, name):
374 374 """Return a filtered version of a repository"""
375 375 # build a new class with the mixin and the current class
376 376 # (possibly subclass of the repo)
377 377 class proxycls(repoview.repoview, self.unfiltered().__class__):
378 378 pass
379 379 return proxycls(self, name)
380 380
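# Editor's note: an illustrative sketch, not part of localrepo.py. filtered()
# builds a repoview proxy on demand, so callers can work against a restricted
# view without mutating the repository object itself, e.g.:
#
#     served = repo.filtered('served')    # view used when serving peers
#     visible = repo.filtered('visible')  # view without hidden changesets
#
# Both names are filters defined in repoview; the proxy class mixes repoview
# into the repository's own class so all methods below remain available.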
381 381 @repofilecache('bookmarks')
382 382 def _bookmarks(self):
383 383 return bookmarks.bmstore(self)
384 384
385 385 @repofilecache('bookmarks.current')
386 386 def _bookmarkcurrent(self):
387 387 return bookmarks.readcurrent(self)
388 388
389 389 def bookmarkheads(self, bookmark):
390 390 name = bookmark.split('@', 1)[0]
391 391 heads = []
392 392 for mark, n in self._bookmarks.iteritems():
393 393 if mark.split('@', 1)[0] == name:
394 394 heads.append(n)
395 395 return heads
396 396
397 397 @storecache('phaseroots')
398 398 def _phasecache(self):
399 399 return phases.phasecache(self, self._phasedefaults)
400 400
401 401 @storecache('obsstore')
402 402 def obsstore(self):
403 403 # read default format for new obsstore.
404 404 defaultformat = self.ui.configint('format', 'obsstore-version', None)
405 405 # rely on obsstore class default when possible.
406 406 kwargs = {}
407 407 if defaultformat is not None:
408 408 kwargs['defaultformat'] = defaultformat
409 409 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
410 410 store = obsolete.obsstore(self.sopener, readonly=readonly,
411 411 **kwargs)
412 412 if store and readonly:
413 413 # message is rare enough to not be translated
414 414 msg = 'obsolete feature not enabled but %i markers found!\n'
415 415 self.ui.warn(msg % len(list(store)))
416 416 return store
417 417
418 418 @storecache('00changelog.i')
419 419 def changelog(self):
420 420 c = changelog.changelog(self.sopener)
421 421 if 'HG_PENDING' in os.environ:
422 422 p = os.environ['HG_PENDING']
423 423 if p.startswith(self.root):
424 424 c.readpending('00changelog.i.a')
425 425 return c
426 426
427 427 @storecache('00manifest.i')
428 428 def manifest(self):
429 429 return manifest.manifest(self.sopener)
430 430
431 431 @repofilecache('dirstate')
432 432 def dirstate(self):
433 433 warned = [0]
434 434 def validate(node):
435 435 try:
436 436 self.changelog.rev(node)
437 437 return node
438 438 except error.LookupError:
439 439 if not warned[0]:
440 440 warned[0] = True
441 441 self.ui.warn(_("warning: ignoring unknown"
442 442 " working parent %s!\n") % short(node))
443 443 return nullid
444 444
445 445 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
446 446
447 447 def __getitem__(self, changeid):
448 448 if changeid is None:
449 449 return context.workingctx(self)
450 450 return context.changectx(self, changeid)
451 451
452 452 def __contains__(self, changeid):
453 453 try:
454 454 return bool(self.lookup(changeid))
455 455 except error.RepoLookupError:
456 456 return False
457 457
458 458 def __nonzero__(self):
459 459 return True
460 460
461 461 def __len__(self):
462 462 return len(self.changelog)
463 463
464 464 def __iter__(self):
465 465 return iter(self.changelog)
466 466
467 467 def revs(self, expr, *args):
468 468 '''Return a list of revisions matching the given revset'''
469 469 expr = revset.formatspec(expr, *args)
470 470 m = revset.match(None, expr)
471 471 return m(self, revset.spanset(self))
472 472
473 473 def set(self, expr, *args):
474 474 '''
475 475 Yield a context for each matching revision, after doing arg
476 476 replacement via revset.formatspec
477 477 '''
478 478 for r in self.revs(expr, *args):
479 479 yield self[r]
480 480
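# Editor's note: an illustrative sketch, not part of localrepo.py. revs() and
# set() take a revset template whose placeholders are expanded by
# revset.formatspec, so a hypothetical caller could write:
#
#     for ctx in repo.set('heads(branch(%s))', 'default'):
#         repo.ui.write('%s\n' % ctx.hex())
#     revnums = repo.revs('%d::%d', 0, 5)    # list of revision numbers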
481 481 def url(self):
482 482 return 'file:' + self.root
483 483
484 484 def hook(self, name, throw=False, **args):
485 485 """Call a hook, passing this repo instance.
486 486
487 487 This is a convenience method to aid invoking hooks. Extensions likely
488 488 won't call this unless they have registered a custom hook or are
489 489 replacing code that is expected to call a hook.
490 490 """
491 491 return hook.hook(self.ui, self, name, throw, **args)
492 492
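# Editor's note: an illustrative sketch, not part of localrepo.py. An
# extension that registered a custom hook would fire it through this helper,
# for example (hypothetical hook name):
#
#     repo.hook('myextpretag', throw=True, node=hex(node), tag=name)
#
# With throw=True a failing hook raises an abort instead of only returning a
# non-zero status, matching how 'pretag' is invoked in _tag() below.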
493 493 @unfilteredmethod
494 494 def _tag(self, names, node, message, local, user, date, extra={},
495 495 editor=False):
496 496 if isinstance(names, str):
497 497 names = (names,)
498 498
499 499 branches = self.branchmap()
500 500 for name in names:
501 501 self.hook('pretag', throw=True, node=hex(node), tag=name,
502 502 local=local)
503 503 if name in branches:
504 504 self.ui.warn(_("warning: tag %s conflicts with existing"
505 505 " branch name\n") % name)
506 506
507 507 def writetags(fp, names, munge, prevtags):
508 508 fp.seek(0, 2)
509 509 if prevtags and prevtags[-1] != '\n':
510 510 fp.write('\n')
511 511 for name in names:
512 512 m = munge and munge(name) or name
513 513 if (self._tagscache.tagtypes and
514 514 name in self._tagscache.tagtypes):
515 515 old = self.tags().get(name, nullid)
516 516 fp.write('%s %s\n' % (hex(old), m))
517 517 fp.write('%s %s\n' % (hex(node), m))
518 518 fp.close()
519 519
520 520 prevtags = ''
521 521 if local:
522 522 try:
523 523 fp = self.opener('localtags', 'r+')
524 524 except IOError:
525 525 fp = self.opener('localtags', 'a')
526 526 else:
527 527 prevtags = fp.read()
528 528
529 529 # local tags are stored in the current charset
530 530 writetags(fp, names, None, prevtags)
531 531 for name in names:
532 532 self.hook('tag', node=hex(node), tag=name, local=local)
533 533 return
534 534
535 535 try:
536 536 fp = self.wfile('.hgtags', 'rb+')
537 537 except IOError, e:
538 538 if e.errno != errno.ENOENT:
539 539 raise
540 540 fp = self.wfile('.hgtags', 'ab')
541 541 else:
542 542 prevtags = fp.read()
543 543
544 544 # committed tags are stored in UTF-8
545 545 writetags(fp, names, encoding.fromlocal, prevtags)
546 546
547 547 fp.close()
548 548
549 549 self.invalidatecaches()
550 550
551 551 if '.hgtags' not in self.dirstate:
552 552 self[None].add(['.hgtags'])
553 553
554 554 m = matchmod.exact(self.root, '', ['.hgtags'])
555 555 tagnode = self.commit(message, user, date, extra=extra, match=m,
556 556 editor=editor)
557 557
558 558 for name in names:
559 559 self.hook('tag', node=hex(node), tag=name, local=local)
560 560
561 561 return tagnode
562 562
563 563 def tag(self, names, node, message, local, user, date, editor=False):
564 564 '''tag a revision with one or more symbolic names.
565 565
566 566 names is a list of strings or, when adding a single tag, names may be a
567 567 string.
568 568
569 569 if local is True, the tags are stored in a per-repository file.
570 570 otherwise, they are stored in the .hgtags file, and a new
571 571 changeset is committed with the change.
572 572
573 573 keyword arguments:
574 574
575 575 local: whether to store tags in non-version-controlled file
576 576 (default False)
577 577
578 578 message: commit message to use if committing
579 579
580 580 user: name of user to use if committing
581 581
582 582 date: date tuple to use if committing'''
583 583
584 584 if not local:
585 585 m = matchmod.exact(self.root, '', ['.hgtags'])
586 586 if util.any(self.status(match=m, unknown=True, ignored=True)):
587 587 raise util.Abort(_('working copy of .hgtags is changed'),
588 588 hint=_('please commit .hgtags manually'))
589 589
590 590 self.tags() # instantiate the cache
591 591 self._tag(names, node, message, local, user, date, editor=editor)
592 592
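# Editor's note: an illustrative sketch, not part of localrepo.py. A caller
# adding a local-only tag on the working directory parent might write
# (hypothetical tag name and message):
#
#     node = repo['.'].node()
#     repo.tag(['nightly'], node, 'mark nightly build',
#              local=True, user=None, date=None)
#
# With local=False the tag goes into .hgtags and a new changeset is
# committed, as described in the docstring above.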
593 593 @filteredpropertycache
594 594 def _tagscache(self):
595 595 '''Returns a tagscache object that contains various tag-related
596 596 caches.'''
597 597
598 598 # This simplifies its cache management by having one decorated
599 599 # function (this one) and the rest simply fetch things from it.
600 600 class tagscache(object):
601 601 def __init__(self):
602 602 # These two define the set of tags for this repository. tags
603 603 # maps tag name to node; tagtypes maps tag name to 'global' or
604 604 # 'local'. (Global tags are defined by .hgtags across all
605 605 # heads, and local tags are defined in .hg/localtags.)
606 606 # They constitute the in-memory cache of tags.
607 607 self.tags = self.tagtypes = None
608 608
609 609 self.nodetagscache = self.tagslist = None
610 610
611 611 cache = tagscache()
612 612 cache.tags, cache.tagtypes = self._findtags()
613 613
614 614 return cache
615 615
616 616 def tags(self):
617 617 '''return a mapping of tag to node'''
618 618 t = {}
619 619 if self.changelog.filteredrevs:
620 620 tags, tt = self._findtags()
621 621 else:
622 622 tags = self._tagscache.tags
623 623 for k, v in tags.iteritems():
624 624 try:
625 625 # ignore tags to unknown nodes
626 626 self.changelog.rev(v)
627 627 t[k] = v
628 628 except (error.LookupError, ValueError):
629 629 pass
630 630 return t
631 631
632 632 def _findtags(self):
633 633 '''Do the hard work of finding tags. Return a pair of dicts
634 634 (tags, tagtypes) where tags maps tag name to node, and tagtypes
635 635 maps tag name to a string like \'global\' or \'local\'.
636 636 Subclasses or extensions are free to add their own tags, but
637 637 should be aware that the returned dicts will be retained for the
638 638 duration of the localrepo object.'''
639 639
640 640 # XXX what tagtype should subclasses/extensions use? Currently
641 641 # mq and bookmarks add tags, but do not set the tagtype at all.
642 642 # Should each extension invent its own tag type? Should there
643 643 # be one tagtype for all such "virtual" tags? Or is the status
644 644 # quo fine?
645 645
646 646 alltags = {} # map tag name to (node, hist)
647 647 tagtypes = {}
648 648
649 649 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
650 650 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
651 651
652 652 # Build the return dicts. Have to re-encode tag names because
653 653 # the tags module always uses UTF-8 (in order not to lose info
654 654 # writing to the cache), but the rest of Mercurial wants them in
655 655 # local encoding.
656 656 tags = {}
657 657 for (name, (node, hist)) in alltags.iteritems():
658 658 if node != nullid:
659 659 tags[encoding.tolocal(name)] = node
660 660 tags['tip'] = self.changelog.tip()
661 661 tagtypes = dict([(encoding.tolocal(name), value)
662 662 for (name, value) in tagtypes.iteritems()])
663 663 return (tags, tagtypes)
664 664
665 665 def tagtype(self, tagname):
666 666 '''
667 667 return the type of the given tag. result can be:
668 668
669 669 'local' : a local tag
670 670 'global' : a global tag
671 671 None : tag does not exist
672 672 '''
673 673
674 674 return self._tagscache.tagtypes.get(tagname)
675 675
676 676 def tagslist(self):
677 677 '''return a list of tags ordered by revision'''
678 678 if not self._tagscache.tagslist:
679 679 l = []
680 680 for t, n in self.tags().iteritems():
681 681 l.append((self.changelog.rev(n), t, n))
682 682 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
683 683
684 684 return self._tagscache.tagslist
685 685
686 686 def nodetags(self, node):
687 687 '''return the tags associated with a node'''
688 688 if not self._tagscache.nodetagscache:
689 689 nodetagscache = {}
690 690 for t, n in self._tagscache.tags.iteritems():
691 691 nodetagscache.setdefault(n, []).append(t)
692 692 for tags in nodetagscache.itervalues():
693 693 tags.sort()
694 694 self._tagscache.nodetagscache = nodetagscache
695 695 return self._tagscache.nodetagscache.get(node, [])
696 696
697 697 def nodebookmarks(self, node):
698 698 marks = []
699 699 for bookmark, n in self._bookmarks.iteritems():
700 700 if n == node:
701 701 marks.append(bookmark)
702 702 return sorted(marks)
703 703
704 704 def branchmap(self):
705 705 '''returns a dictionary {branch: [branchheads]} with branchheads
706 706 ordered by increasing revision number'''
707 707 branchmap.updatecache(self)
708 708 return self._branchcaches[self.filtername]
709 709
710 710 def branchtip(self, branch):
711 711 '''return the tip node for a given branch'''
712 712 try:
713 713 return self.branchmap().branchtip(branch)
714 714 except KeyError:
715 715 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
716 716
717 717 def lookup(self, key):
718 718 return self[key].node()
719 719
720 720 def lookupbranch(self, key, remote=None):
721 721 repo = remote or self
722 722 if key in repo.branchmap():
723 723 return key
724 724
725 725 repo = (remote and remote.local()) and remote or self
726 726 return repo[key].branch()
727 727
728 728 def known(self, nodes):
729 729 nm = self.changelog.nodemap
730 730 pc = self._phasecache
731 731 result = []
732 732 for n in nodes:
733 733 r = nm.get(n)
734 734 resp = not (r is None or pc.phase(self, r) >= phases.secret)
735 735 result.append(resp)
736 736 return result
737 737
738 738 def local(self):
739 739 return self
740 740
741 741 def cancopy(self):
742 742 # so statichttprepo's override of local() works
743 743 if not self.local():
744 744 return False
745 745 if not self.ui.configbool('phases', 'publish', True):
746 746 return True
747 747 # if publishing we can't copy if there is filtered content
748 748 return not self.filtered('visible').changelog.filteredrevs
749 749
750 750 def join(self, f, *insidef):
751 751 return os.path.join(self.path, f, *insidef)
752 752
753 753 def wjoin(self, f, *insidef):
754 754 return os.path.join(self.root, f, *insidef)
755 755
756 756 def file(self, f):
757 757 if f[0] == '/':
758 758 f = f[1:]
759 759 return filelog.filelog(self.sopener, f)
760 760
761 761 def changectx(self, changeid):
762 762 return self[changeid]
763 763
764 764 def parents(self, changeid=None):
765 765 '''get list of changectxs for parents of changeid'''
766 766 return self[changeid].parents()
767 767
768 768 def setparents(self, p1, p2=nullid):
769 769 self.dirstate.beginparentchange()
770 770 copies = self.dirstate.setparents(p1, p2)
771 771 pctx = self[p1]
772 772 if copies:
773 773 # Adjust copy records; the dirstate cannot do it, as it
774 774 # requires access to the parents' manifests. Preserve them
775 775 # only for entries added to the first parent.
776 776 for f in copies:
777 777 if f not in pctx and copies[f] in pctx:
778 778 self.dirstate.copy(copies[f], f)
779 779 if p2 == nullid:
780 780 for f, s in sorted(self.dirstate.copies().items()):
781 781 if f not in pctx and s not in pctx:
782 782 self.dirstate.copy(None, f)
783 783 self.dirstate.endparentchange()
784 784
785 785 def filectx(self, path, changeid=None, fileid=None):
786 786 """changeid can be a changeset revision, node, or tag.
787 787 fileid can be a file revision or node."""
788 788 return context.filectx(self, path, changeid, fileid)
789 789
790 790 def getcwd(self):
791 791 return self.dirstate.getcwd()
792 792
793 793 def pathto(self, f, cwd=None):
794 794 return self.dirstate.pathto(f, cwd)
795 795
796 796 def wfile(self, f, mode='r'):
797 797 return self.wopener(f, mode)
798 798
799 799 def _link(self, f):
800 800 return self.wvfs.islink(f)
801 801
802 802 def _loadfilter(self, filter):
803 803 if filter not in self.filterpats:
804 804 l = []
805 805 for pat, cmd in self.ui.configitems(filter):
806 806 if cmd == '!':
807 807 continue
808 808 mf = matchmod.match(self.root, '', [pat])
809 809 fn = None
810 810 params = cmd
811 811 for name, filterfn in self._datafilters.iteritems():
812 812 if cmd.startswith(name):
813 813 fn = filterfn
814 814 params = cmd[len(name):].lstrip()
815 815 break
816 816 if not fn:
817 817 fn = lambda s, c, **kwargs: util.filter(s, c)
818 818 # Wrap old filters not supporting keyword arguments
819 819 if not inspect.getargspec(fn)[2]:
820 820 oldfn = fn
821 821 fn = lambda s, c, **kwargs: oldfn(s, c)
822 822 l.append((mf, fn, params))
823 823 self.filterpats[filter] = l
824 824 return self.filterpats[filter]
825 825
826 826 def _filter(self, filterpats, filename, data):
827 827 for mf, fn, cmd in filterpats:
828 828 if mf(filename):
829 829 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
830 830 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
831 831 break
832 832
833 833 return data
834 834
835 835 @unfilteredpropertycache
836 836 def _encodefilterpats(self):
837 837 return self._loadfilter('encode')
838 838
839 839 @unfilteredpropertycache
840 840 def _decodefilterpats(self):
841 841 return self._loadfilter('decode')
842 842
843 843 def adddatafilter(self, name, filter):
844 844 self._datafilters[name] = filter
845 845
846 846 def wread(self, filename):
847 847 if self._link(filename):
848 848 data = self.wvfs.readlink(filename)
849 849 else:
850 850 data = self.wopener.read(filename)
851 851 return self._filter(self._encodefilterpats, filename, data)
852 852
853 853 def wwrite(self, filename, data, flags):
854 854 data = self._filter(self._decodefilterpats, filename, data)
855 855 if 'l' in flags:
856 856 self.wopener.symlink(data, filename)
857 857 else:
858 858 self.wopener.write(filename, data)
859 859 if 'x' in flags:
860 860 self.wvfs.setflags(filename, False, True)
861 861
862 862 def wwritedata(self, filename, data):
863 863 return self._filter(self._decodefilterpats, filename, data)
864 864
865 865 def transaction(self, desc, report=None):
866 866 tr = self._transref and self._transref() or None
867 867 if tr and tr.running():
868 868 return tr.nest()
869 869
870 870 # abort here if the journal already exists
871 871 if self.svfs.exists("journal"):
872 872 raise error.RepoError(
873 873 _("abandoned transaction found"),
874 874 hint=_("run 'hg recover' to clean up transaction"))
875 875
876 876 def onclose():
877 877 self.store.write(self._transref())
878 878
879 879 self._writejournal(desc)
880 880 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
881 881 rp = report and report or self.ui.warn
882 882 tr = transaction.transaction(rp, self.sopener,
883 883 "journal",
884 884 aftertrans(renames),
885 885 self.store.createmode,
886 886 onclose)
887 887 self._transref = weakref.ref(tr)
888 888 return tr
889 889
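# Editor's note: an illustrative sketch, not part of localrepo.py. The usual
# calling convention around transaction() is lock -> transaction -> close ->
# release, mirroring what commitctx() does further down:
#
#     lock = repo.lock()
#     try:
#         tr = repo.transaction('my-operation')    # hypothetical description
#         try:
#             # ... write to the store through tr-tracked files ...
#             tr.close()
#         finally:
#             tr.release()
#     finally:
#         lock.release()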
890 890 def _journalfiles(self):
891 891 return ((self.svfs, 'journal'),
892 892 (self.vfs, 'journal.dirstate'),
893 893 (self.vfs, 'journal.branch'),
894 894 (self.vfs, 'journal.desc'),
895 895 (self.vfs, 'journal.bookmarks'),
896 896 (self.svfs, 'journal.phaseroots'))
897 897
898 898 def undofiles(self):
899 899 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
900 900
901 901 def _writejournal(self, desc):
902 902 self.opener.write("journal.dirstate",
903 903 self.opener.tryread("dirstate"))
904 904 self.opener.write("journal.branch",
905 905 encoding.fromlocal(self.dirstate.branch()))
906 906 self.opener.write("journal.desc",
907 907 "%d\n%s\n" % (len(self), desc))
908 908 self.opener.write("journal.bookmarks",
909 909 self.opener.tryread("bookmarks"))
910 910 self.sopener.write("journal.phaseroots",
911 911 self.sopener.tryread("phaseroots"))
912 912
913 913 def recover(self):
914 914 lock = self.lock()
915 915 try:
916 916 if self.svfs.exists("journal"):
917 917 self.ui.status(_("rolling back interrupted transaction\n"))
918 918 transaction.rollback(self.sopener, "journal",
919 919 self.ui.warn)
920 920 self.invalidate()
921 921 return True
922 922 else:
923 923 self.ui.warn(_("no interrupted transaction available\n"))
924 924 return False
925 925 finally:
926 926 lock.release()
927 927
928 928 def rollback(self, dryrun=False, force=False):
929 929 wlock = lock = None
930 930 try:
931 931 wlock = self.wlock()
932 932 lock = self.lock()
933 933 if self.svfs.exists("undo"):
934 934 return self._rollback(dryrun, force)
935 935 else:
936 936 self.ui.warn(_("no rollback information available\n"))
937 937 return 1
938 938 finally:
939 939 release(lock, wlock)
940 940
941 941 @unfilteredmethod # Until we get smarter cache management
942 942 def _rollback(self, dryrun, force):
943 943 ui = self.ui
944 944 try:
945 945 args = self.opener.read('undo.desc').splitlines()
946 946 (oldlen, desc, detail) = (int(args[0]), args[1], None)
947 947 if len(args) >= 3:
948 948 detail = args[2]
949 949 oldtip = oldlen - 1
950 950
951 951 if detail and ui.verbose:
952 952 msg = (_('repository tip rolled back to revision %s'
953 953 ' (undo %s: %s)\n')
954 954 % (oldtip, desc, detail))
955 955 else:
956 956 msg = (_('repository tip rolled back to revision %s'
957 957 ' (undo %s)\n')
958 958 % (oldtip, desc))
959 959 except IOError:
960 960 msg = _('rolling back unknown transaction\n')
961 961 desc = None
962 962
963 963 if not force and self['.'] != self['tip'] and desc == 'commit':
964 964 raise util.Abort(
965 965 _('rollback of last commit while not checked out '
966 966 'may lose data'), hint=_('use -f to force'))
967 967
968 968 ui.status(msg)
969 969 if dryrun:
970 970 return 0
971 971
972 972 parents = self.dirstate.parents()
973 973 self.destroying()
974 974 transaction.rollback(self.sopener, 'undo', ui.warn)
975 975 if self.vfs.exists('undo.bookmarks'):
976 976 self.vfs.rename('undo.bookmarks', 'bookmarks')
977 977 if self.svfs.exists('undo.phaseroots'):
978 978 self.svfs.rename('undo.phaseroots', 'phaseroots')
979 979 self.invalidate()
980 980
981 981 parentgone = (parents[0] not in self.changelog.nodemap or
982 982 parents[1] not in self.changelog.nodemap)
983 983 if parentgone:
984 984 self.vfs.rename('undo.dirstate', 'dirstate')
985 985 try:
986 986 branch = self.opener.read('undo.branch')
987 987 self.dirstate.setbranch(encoding.tolocal(branch))
988 988 except IOError:
989 989 ui.warn(_('named branch could not be reset: '
990 990 'current branch is still \'%s\'\n')
991 991 % self.dirstate.branch())
992 992
993 993 self.dirstate.invalidate()
994 994 parents = tuple([p.rev() for p in self.parents()])
995 995 if len(parents) > 1:
996 996 ui.status(_('working directory now based on '
997 997 'revisions %d and %d\n') % parents)
998 998 else:
999 999 ui.status(_('working directory now based on '
1000 1000 'revision %d\n') % parents)
1001 1001 # TODO: if we know which new heads may result from this rollback, pass
1002 1002 # them to destroy(), which will prevent the branchhead cache from being
1003 1003 # invalidated.
1004 1004 self.destroyed()
1005 1005 return 0
1006 1006
1007 1007 def invalidatecaches(self):
1008 1008
1009 1009 if '_tagscache' in vars(self):
1010 1010 # can't use delattr on proxy
1011 1011 del self.__dict__['_tagscache']
1012 1012
1013 1013 self.unfiltered()._branchcaches.clear()
1014 1014 self.invalidatevolatilesets()
1015 1015
1016 1016 def invalidatevolatilesets(self):
1017 1017 self.filteredrevcache.clear()
1018 1018 obsolete.clearobscaches(self)
1019 1019
1020 1020 def invalidatedirstate(self):
1021 1021 '''Invalidates the dirstate, causing the next call to dirstate
1022 1022 to check if it was modified since the last time it was read,
1023 1023 rereading it if it has.
1024 1024
1025 1025 This differs from dirstate.invalidate() in that it doesn't always
1026 1026 reread the dirstate. Use dirstate.invalidate() if you want to
1027 1027 explicitly read the dirstate again (i.e. restoring it to a previous
1028 1028 known good state).'''
1029 1029 if hasunfilteredcache(self, 'dirstate'):
1030 1030 for k in self.dirstate._filecache:
1031 1031 try:
1032 1032 delattr(self.dirstate, k)
1033 1033 except AttributeError:
1034 1034 pass
1035 1035 delattr(self.unfiltered(), 'dirstate')
1036 1036
1037 1037 def invalidate(self):
1038 1038 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1039 1039 for k in self._filecache:
1040 1040 # dirstate is invalidated separately in invalidatedirstate()
1041 1041 if k == 'dirstate':
1042 1042 continue
1043 1043
1044 1044 try:
1045 1045 delattr(unfiltered, k)
1046 1046 except AttributeError:
1047 1047 pass
1048 1048 self.invalidatecaches()
1049 1049 self.store.invalidatecaches()
1050 1050
1051 1051 def invalidateall(self):
1052 1052 '''Fully invalidates both store and non-store parts, causing the
1053 1053 subsequent operation to reread any outside changes.'''
1054 1054 # extension should hook this to invalidate its caches
1055 1055 self.invalidate()
1056 1056 self.invalidatedirstate()
1057 1057
1058 1058 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1059 1059 try:
1060 1060 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1061 1061 except error.LockHeld, inst:
1062 1062 if not wait:
1063 1063 raise
1064 1064 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1065 1065 (desc, inst.locker))
1066 1066 # default to 600 seconds timeout
1067 1067 l = lockmod.lock(vfs, lockname,
1068 1068 int(self.ui.config("ui", "timeout", "600")),
1069 1069 releasefn, desc=desc)
1070 1070 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1071 1071 if acquirefn:
1072 1072 acquirefn()
1073 1073 return l
1074 1074
1075 1075 def _afterlock(self, callback):
1076 1076 """add a callback to the current repository lock.
1077 1077
1078 1078 The callback will be executed on lock release."""
1079 1079 l = self._lockref and self._lockref()
1080 1080 if l:
1081 1081 l.postrelease.append(callback)
1082 1082 else:
1083 1083 callback()
1084 1084
1085 1085 def lock(self, wait=True):
1086 1086 '''Lock the repository store (.hg/store) and return a weak reference
1087 1087 to the lock. Use this before modifying the store (e.g. committing or
1088 1088 stripping). If you are opening a transaction, get a lock as well.'''
1089 1089 l = self._lockref and self._lockref()
1090 1090 if l is not None and l.held:
1091 1091 l.lock()
1092 1092 return l
1093 1093
1094 1094 def unlock():
1095 1095 for k, ce in self._filecache.items():
1096 1096 if k == 'dirstate' or k not in self.__dict__:
1097 1097 continue
1098 1098 ce.refresh()
1099 1099
1100 1100 l = self._lock(self.svfs, "lock", wait, unlock,
1101 1101 self.invalidate, _('repository %s') % self.origroot)
1102 1102 self._lockref = weakref.ref(l)
1103 1103 return l
1104 1104
1105 1105 def wlock(self, wait=True):
1106 1106 '''Lock the non-store parts of the repository (everything under
1107 1107 .hg except .hg/store) and return a weak reference to the lock.
1108 1108 Use this before modifying files in .hg.'''
1109 1109 l = self._wlockref and self._wlockref()
1110 1110 if l is not None and l.held:
1111 1111 l.lock()
1112 1112 return l
1113 1113
1114 1114 def unlock():
1115 1115 if self.dirstate.pendingparentchange():
1116 1116 self.dirstate.invalidate()
1117 1117 else:
1118 1118 self.dirstate.write()
1119 1119
1120 1120 self._filecache['dirstate'].refresh()
1121 1121
1122 1122 l = self._lock(self.vfs, "wlock", wait, unlock,
1123 1123 self.invalidatedirstate, _('working directory of %s') %
1124 1124 self.origroot)
1125 1125 self._wlockref = weakref.ref(l)
1126 1126 return l
1127 1127
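# Editor's note: an illustrative sketch, not part of localrepo.py. When both
# locks are needed, the convention used elsewhere in this file (see
# rollback() above) is to take wlock before lock and release in the reverse
# order:
#
#     wlock = repo.wlock()
#     lock = repo.lock()
#     try:
#         pass    # ... mutate store and working-directory metadata ...
#     finally:
#         release(lock, wlock)    # 'release' comes from mercurial.lock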
1128 1128 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1129 1129 """
1130 1130 commit an individual file as part of a larger transaction
1131 1131 """
1132 1132
1133 1133 fname = fctx.path()
1134 1134 text = fctx.data()
1135 1135 flog = self.file(fname)
1136 1136 fparent1 = manifest1.get(fname, nullid)
1137 1137 fparent2 = manifest2.get(fname, nullid)
1138 1138
1139 1139 meta = {}
1140 1140 copy = fctx.renamed()
1141 1141 if copy and copy[0] != fname:
1142 1142 # Mark the new revision of this file as a copy of another
1143 1143 # file. This copy data will effectively act as a parent
1144 1144 # of this new revision. If this is a merge, the first
1145 1145 # parent will be the nullid (meaning "look up the copy data")
1146 1146 # and the second one will be the other parent. For example:
1147 1147 #
1148 1148 # 0 --- 1 --- 3 rev1 changes file foo
1149 1149 # \ / rev2 renames foo to bar and changes it
1150 1150 # \- 2 -/ rev3 should have bar with all changes and
1151 1151 # should record that bar descends from
1152 1152 # bar in rev2 and foo in rev1
1153 1153 #
1154 1154 # this allows this merge to succeed:
1155 1155 #
1156 1156 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1157 1157 # \ / merging rev3 and rev4 should use bar@rev2
1158 1158 # \- 2 --- 4 as the merge base
1159 1159 #
1160 1160
1161 1161 cfname = copy[0]
1162 1162 crev = manifest1.get(cfname)
1163 1163 newfparent = fparent2
1164 1164
1165 1165 if manifest2: # branch merge
1166 1166 if fparent2 == nullid or crev is None: # copied on remote side
1167 1167 if cfname in manifest2:
1168 1168 crev = manifest2[cfname]
1169 1169 newfparent = fparent1
1170 1170
1171 1171 # find source in nearest ancestor if we've lost track
1172 1172 if not crev:
1173 1173 self.ui.debug(" %s: searching for copy revision for %s\n" %
1174 1174 (fname, cfname))
1175 1175 for ancestor in self[None].ancestors():
1176 1176 if cfname in ancestor:
1177 1177 crev = ancestor[cfname].filenode()
1178 1178 break
1179 1179
1180 1180 if crev:
1181 1181 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1182 1182 meta["copy"] = cfname
1183 1183 meta["copyrev"] = hex(crev)
1184 1184 fparent1, fparent2 = nullid, newfparent
1185 1185 else:
1186 1186 self.ui.warn(_("warning: can't find ancestor for '%s' "
1187 1187 "copied from '%s'!\n") % (fname, cfname))
1188 1188
1189 1189 elif fparent1 == nullid:
1190 1190 fparent1, fparent2 = fparent2, nullid
1191 1191 elif fparent2 != nullid:
1192 1192 # is one parent an ancestor of the other?
1193 1193 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1194 1194 if fparent1 in fparentancestors:
1195 1195 fparent1, fparent2 = fparent2, nullid
1196 1196 elif fparent2 in fparentancestors:
1197 1197 fparent2 = nullid
1198 1198
1199 1199 # is the file changed?
1200 1200 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1201 1201 changelist.append(fname)
1202 1202 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1203 1203 # are just the flags changed during merge?
1204 1204 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1205 1205 changelist.append(fname)
1206 1206
1207 1207 return fparent1
1208 1208
1209 1209 @unfilteredmethod
1210 1210 def commit(self, text="", user=None, date=None, match=None, force=False,
1211 1211 editor=False, extra={}):
1212 1212 """Add a new revision to current repository.
1213 1213
1214 1214 Revision information is gathered from the working directory,
1215 1215 match can be used to filter the committed files. If editor is
1216 1216 supplied, it is called to get a commit message.
1217 1217 """
1218 1218
1219 1219 def fail(f, msg):
1220 1220 raise util.Abort('%s: %s' % (f, msg))
1221 1221
1222 1222 if not match:
1223 1223 match = matchmod.always(self.root, '')
1224 1224
1225 1225 if not force:
1226 1226 vdirs = []
1227 1227 match.explicitdir = vdirs.append
1228 1228 match.bad = fail
1229 1229
1230 1230 wlock = self.wlock()
1231 1231 try:
1232 1232 wctx = self[None]
1233 1233 merge = len(wctx.parents()) > 1
1234 1234
1235 1235 if (not force and merge and match and
1236 1236 (match.files() or match.anypats())):
1237 1237 raise util.Abort(_('cannot partially commit a merge '
1238 1238 '(do not specify files or patterns)'))
1239 1239
1240 1240 status = self.status(match=match, clean=force)
1241 1241 if force:
1242 1242 status.modified.extend(status.clean) # mq may commit clean files
1243 1243
1244 1244 # check subrepos
1245 1245 subs = []
1246 1246 commitsubs = set()
1247 1247 newstate = wctx.substate.copy()
1248 1248 # only manage subrepos and .hgsubstate if .hgsub is present
1249 1249 if '.hgsub' in wctx:
1250 1250 # we'll decide whether to track this ourselves, thanks
1251 1251 for c in status.modified, status.added, status.removed:
1252 1252 if '.hgsubstate' in c:
1253 1253 c.remove('.hgsubstate')
1254 1254
1255 1255 # compare current state to last committed state
1256 1256 # build new substate based on last committed state
1257 1257 oldstate = wctx.p1().substate
1258 1258 for s in sorted(newstate.keys()):
1259 1259 if not match(s):
1260 1260 # ignore working copy, use old state if present
1261 1261 if s in oldstate:
1262 1262 newstate[s] = oldstate[s]
1263 1263 continue
1264 1264 if not force:
1265 1265 raise util.Abort(
1266 1266 _("commit with new subrepo %s excluded") % s)
1267 1267 if wctx.sub(s).dirty(True):
1268 1268 if not self.ui.configbool('ui', 'commitsubrepos'):
1269 1269 raise util.Abort(
1270 1270 _("uncommitted changes in subrepo %s") % s,
1271 1271 hint=_("use --subrepos for recursive commit"))
1272 1272 subs.append(s)
1273 1273 commitsubs.add(s)
1274 1274 else:
1275 1275 bs = wctx.sub(s).basestate()
1276 1276 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1277 1277 if oldstate.get(s, (None, None, None))[1] != bs:
1278 1278 subs.append(s)
1279 1279
1280 1280 # check for removed subrepos
1281 1281 for p in wctx.parents():
1282 1282 r = [s for s in p.substate if s not in newstate]
1283 1283 subs += [s for s in r if match(s)]
1284 1284 if subs:
1285 1285 if (not match('.hgsub') and
1286 1286 '.hgsub' in (wctx.modified() + wctx.added())):
1287 1287 raise util.Abort(
1288 1288 _("can't commit subrepos without .hgsub"))
1289 1289 status.modified.insert(0, '.hgsubstate')
1290 1290
1291 1291 elif '.hgsub' in status.removed:
1292 1292 # clean up .hgsubstate when .hgsub is removed
1293 1293 if ('.hgsubstate' in wctx and
1294 1294 '.hgsubstate' not in (status.modified + status.added +
1295 1295 status.removed)):
1296 1296 status.removed.insert(0, '.hgsubstate')
1297 1297
1298 1298 # make sure all explicit patterns are matched
1299 1299 if not force and match.files():
1300 1300 matched = set(status.modified + status.added + status.removed)
1301 1301
1302 1302 for f in match.files():
1303 1303 f = self.dirstate.normalize(f)
1304 1304 if f == '.' or f in matched or f in wctx.substate:
1305 1305 continue
1306 1306 if f in status.deleted:
1307 1307 fail(f, _('file not found!'))
1308 1308 if f in vdirs: # visited directory
1309 1309 d = f + '/'
1310 1310 for mf in matched:
1311 1311 if mf.startswith(d):
1312 1312 break
1313 1313 else:
1314 1314 fail(f, _("no match under directory!"))
1315 1315 elif f not in self.dirstate:
1316 1316 fail(f, _("file not tracked!"))
1317 1317
1318 1318 cctx = context.workingctx(self, text, user, date, extra, status)
1319 1319
1320 1320 if (not force and not extra.get("close") and not merge
1321 1321 and not cctx.files()
1322 1322 and wctx.branch() == wctx.p1().branch()):
1323 1323 return None
1324 1324
1325 1325 if merge and cctx.deleted():
1326 1326 raise util.Abort(_("cannot commit merge with missing files"))
1327 1327
1328 1328 ms = mergemod.mergestate(self)
1329 1329 for f in status.modified:
1330 1330 if f in ms and ms[f] == 'u':
1331 1331 raise util.Abort(_("unresolved merge conflicts "
1332 1332 "(see hg help resolve)"))
1333 1333
1334 1334 if editor:
1335 1335 cctx._text = editor(self, cctx, subs)
1336 1336 edited = (text != cctx._text)
1337 1337
1338 1338 # Save commit message in case this transaction gets rolled back
1339 1339 # (e.g. by a pretxncommit hook). Leave the content alone on
1340 1340 # the assumption that the user will use the same editor again.
1341 1341 msgfn = self.savecommitmessage(cctx._text)
1342 1342
1343 1343 # commit subs and write new state
1344 1344 if subs:
1345 1345 for s in sorted(commitsubs):
1346 1346 sub = wctx.sub(s)
1347 1347 self.ui.status(_('committing subrepository %s\n') %
1348 1348 subrepo.subrelpath(sub))
1349 1349 sr = sub.commit(cctx._text, user, date)
1350 1350 newstate[s] = (newstate[s][0], sr)
1351 1351 subrepo.writestate(self, newstate)
1352 1352
1353 1353 p1, p2 = self.dirstate.parents()
1354 1354 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1355 1355 try:
1356 1356 self.hook("precommit", throw=True, parent1=hookp1,
1357 1357 parent2=hookp2)
1358 1358 ret = self.commitctx(cctx, True)
1359 1359 except: # re-raises
1360 1360 if edited:
1361 1361 self.ui.write(
1362 1362 _('note: commit message saved in %s\n') % msgfn)
1363 1363 raise
1364 1364
1365 1365 # update bookmarks, dirstate and mergestate
1366 1366 bookmarks.update(self, [p1, p2], ret)
1367 1367 cctx.markcommitted(ret)
1368 1368 ms.reset()
1369 1369 finally:
1370 1370 wlock.release()
1371 1371
1372 1372 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1373 1373 # hack for commands that use a temporary commit (e.g. histedit):
1374 1374 # the temporary commit may have been stripped before the hook runs
1375 1375 if node in self:
1376 1376 self.hook("commit", node=node, parent1=parent1,
1377 1377 parent2=parent2)
1378 1378 self._afterlock(commithook)
1379 1379 return ret
1380 1380
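# Editor's note: an illustrative sketch, not part of localrepo.py. A minimal
# programmatic commit restricted to a single file could look like
# (hypothetical file name and message):
#
#     m = matchmod.exact(repo.root, '', ['README'])
#     node = repo.commit('update README', user='someone', match=m)
#     if node is None:
#         pass    # nothing changed, so no commit was created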
1381 1381 @unfilteredmethod
1382 1382 def commitctx(self, ctx, error=False):
1383 1383 """Add a new revision to current repository.
1384 1384 Revision information is passed via the context argument.
1385 1385 """
1386 1386
1387 1387 tr = None
1388 1388 p1, p2 = ctx.p1(), ctx.p2()
1389 1389 user = ctx.user()
1390 1390
1391 1391 lock = self.lock()
1392 1392 try:
1393 1393 tr = self.transaction("commit")
1394 1394 trp = weakref.proxy(tr)
1395 1395
1396 1396 if ctx.files():
1397 1397 m1 = p1.manifest()
1398 1398 m2 = p2.manifest()
1399 1399 m = m1.copy()
1400 1400
1401 1401 # check in files
1402 1402 added = []
1403 1403 changed = []
1404 1404 removed = list(ctx.removed())
1405 1405 linkrev = len(self)
1406 1406 for f in sorted(ctx.modified() + ctx.added()):
1407 1407 self.ui.note(f + "\n")
1408 1408 try:
1409 1409 fctx = ctx[f]
1410 1410 if fctx is None:
1411 1411 removed.append(f)
1412 1412 else:
1413 1413 added.append(f)
1414 1414 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1415 1415 trp, changed)
1416 1416 m.setflag(f, fctx.flags())
1417 1417 except OSError, inst:
1418 1418 self.ui.warn(_("trouble committing %s!\n") % f)
1419 1419 raise
1420 1420 except IOError, inst:
1421 1421 errcode = getattr(inst, 'errno', errno.ENOENT)
1422 1422 if error or errcode and errcode != errno.ENOENT:
1423 1423 self.ui.warn(_("trouble committing %s!\n") % f)
1424 1424 raise
1425 1425
1426 1426 # update manifest
1427 1427 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1428 1428 drop = [f for f in removed if f in m]
1429 1429 for f in drop:
1430 1430 del m[f]
1431 1431 mn = self.manifest.add(m, trp, linkrev,
1432 1432 p1.manifestnode(), p2.manifestnode(),
1433 1433 added, drop)
1434 1434 files = changed + removed
1435 1435 else:
1436 1436 mn = p1.manifestnode()
1437 1437 files = []
1438 1438
1439 1439 # update changelog
1440 1440 self.changelog.delayupdate()
1441 1441 n = self.changelog.add(mn, files, ctx.description(),
1442 1442 trp, p1.node(), p2.node(),
1443 1443 user, ctx.date(), ctx.extra().copy())
1444 1444 p = lambda: self.changelog.writepending() and self.root or ""
1445 1445 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1446 1446 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1447 1447 parent2=xp2, pending=p)
1448 1448 self.changelog.finalize(trp)
1449 1449 # set the new commit in its proper phase
1450 1450 targetphase = subrepo.newcommitphase(self.ui, ctx)
1451 1451 if targetphase:
1452 1452 # retracting the boundary does not alter parent changesets.
1453 1453 # if a parent has a higher phase, the resulting phase will
1454 1454 # be compliant anyway
1455 1455 #
1456 1456 # if minimal phase was 0 we don't need to retract anything
1457 1457 phases.retractboundary(self, tr, targetphase, [n])
1458 1458 tr.close()
1459 1459 branchmap.updatecache(self.filtered('served'))
1460 1460 return n
1461 1461 finally:
1462 1462 if tr:
1463 1463 tr.release()
1464 1464 lock.release()
1465 1465
1466 1466 @unfilteredmethod
1467 1467 def destroying(self):
1468 1468 '''Inform the repository that nodes are about to be destroyed.
1469 1469 Intended for use by strip and rollback, so there's a common
1470 1470 place for anything that has to be done before destroying history.
1471 1471
1472 1472 This is mostly useful for saving state that is in memory and waiting
1473 1473 to be flushed when the current lock is released. Because a call to
1474 1474 destroyed is imminent, the repo will be invalidated causing those
1475 1475 changes to stay in memory (waiting for the next unlock), or vanish
1476 1476 completely.
1477 1477 '''
1478 1478 # When using the same lock to commit and strip, the phasecache is left
1479 1479 # dirty after committing. Then when we strip, the repo is invalidated,
1480 1480 # causing those changes to disappear.
1481 1481 if '_phasecache' in vars(self):
1482 1482 self._phasecache.write()
1483 1483
1484 1484 @unfilteredmethod
1485 1485 def destroyed(self):
1486 1486 '''Inform the repository that nodes have been destroyed.
1487 1487 Intended for use by strip and rollback, so there's a common
1488 1488 place for anything that has to be done after destroying history.
1489 1489 '''
1490 1490 # When one tries to:
1491 1491 # 1) destroy nodes thus calling this method (e.g. strip)
1492 1492 # 2) use phasecache somewhere (e.g. commit)
1493 1493 #
1494 1494 # then 2) will fail because the phasecache contains nodes that were
1495 1495 # removed. We can either remove phasecache from the filecache,
1496 1496 # causing it to reload next time it is accessed, or simply filter
1497 1497 # the removed nodes now and write the updated cache.
1498 1498 self._phasecache.filterunknown(self)
1499 1499 self._phasecache.write()
1500 1500
1501 1501 # update the 'served' branch cache to help read-only server processes
1502 1502 # Thanks to branchcache collaboration this is done from the nearest
1503 1503 # filtered subset and it is expected to be fast.
1504 1504 branchmap.updatecache(self.filtered('served'))
1505 1505
1506 1506 # Ensure the persistent tag cache is updated. Doing it now
1507 1507 # means that the tag cache only has to worry about destroyed
1508 1508 # heads immediately after a strip/rollback. That in turn
1509 1509 # guarantees that "cachetip == currenttip" (comparing both rev
1510 1510 # and node) always means no nodes have been added or destroyed.
1511 1511
1512 1512 # XXX this is suboptimal when qrefresh'ing: we strip the current
1513 1513 # head, refresh the tag cache, then immediately add a new head.
1514 1514 # But I think doing it this way is necessary for the "instant
1515 1515 # tag cache retrieval" case to work.
1516 1516 self.invalidate()
1517 1517
1518 1518 def walk(self, match, node=None):
1519 1519 '''
1520 1520 walk recursively through the directory tree or a given
1521 1521 changeset, finding all files matched by the match
1522 1522 function
1523 1523 '''
1524 1524 return self[node].walk(match)
1525 1525
1526 1526 def status(self, node1='.', node2=None, match=None,
1527 1527 ignored=False, clean=False, unknown=False,
1528 1528 listsubrepos=False):
1529 1529 '''a convenience method that calls node1.status(node2)'''
1530 1530 return self[node1].status(node2, match, ignored, clean, unknown,
1531 1531 listsubrepos)
1532 1532
1533 1533 def heads(self, start=None):
1534 1534 heads = self.changelog.heads(start)
1535 1535 # sort the output in rev descending order
1536 1536 return sorted(heads, key=self.changelog.rev, reverse=True)
1537 1537
1538 1538 def branchheads(self, branch=None, start=None, closed=False):
1539 1539 '''return a (possibly filtered) list of heads for the given branch
1540 1540
1541 1541 Heads are returned in topological order, from newest to oldest.
1542 1542 If branch is None, use the dirstate branch.
1543 1543 If start is not None, return only heads reachable from start.
1544 1544 If closed is True, return heads that are marked as closed as well.
1545 1545 '''
1546 1546 if branch is None:
1547 1547 branch = self[None].branch()
1548 1548 branches = self.branchmap()
1549 1549 if branch not in branches:
1550 1550 return []
1551 1551 # the cache returns heads ordered lowest to highest
1552 1552 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1553 1553 if start is not None:
1554 1554 # filter out the heads that cannot be reached from startrev
1555 1555 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1556 1556 bheads = [h for h in bheads if h in fbheads]
1557 1557 return bheads
1558 1558
1559 1559 def branches(self, nodes):
1560 1560 if not nodes:
1561 1561 nodes = [self.changelog.tip()]
1562 1562 b = []
1563 1563 for n in nodes:
1564 1564 t = n
1565 1565 while True:
1566 1566 p = self.changelog.parents(n)
1567 1567 if p[1] != nullid or p[0] == nullid:
1568 1568 b.append((t, n, p[0], p[1]))
1569 1569 break
1570 1570 n = p[0]
1571 1571 return b
1572 1572
1573 1573 def between(self, pairs):
1574 1574 r = []
1575 1575
1576 1576 for top, bottom in pairs:
1577 1577 n, l, i = top, [], 0
1578 1578 f = 1
1579 1579
1580 1580 while n != bottom and n != nullid:
1581 1581 p = self.changelog.parents(n)[0]
1582 1582 if i == f:
1583 1583 l.append(n)
1584 1584 f = f * 2
1585 1585 n = p
1586 1586 i += 1
1587 1587
1588 1588 r.append(l)
1589 1589
1590 1590 return r
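
For reference, between() is used by the old wire-protocol discovery: for each (top, bottom) pair it walks first parents from top towards bottom and records the nodes found at distances 1, 2, 4, 8, ... from the top. A hedged illustration on a hypothetical linear repository (nothing below is from the patch itself, it only shows the sampling pattern):

    # assuming a linear history with revisions 0..20
    cl = repo.changelog
    top, bottom = cl.node(20), cl.node(0)
    [samples] = repo.between([(top, bottom)])
    # ancestors at distances 1, 2, 4, 8, 16 from the top
    assert [cl.rev(n) for n in samples] == [19, 18, 16, 12, 4]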
1591 1591
1592 1592 def checkpush(self, pushop):
1593 1593 """Extensions can override this function if additional checks have
1594 1594 to be performed before pushing, or call it if they override push
1595 1595 command.
1596 1596 """
1597 1597 pass
1598 1598
1599 1599 @unfilteredpropertycache
1600 1600 def prepushoutgoinghooks(self):
1601 1601 """Return util.hooks consists of "(repo, remote, outgoing)"
1602 1602 functions, which are called before pushing changesets.
1603 1603 """
1604 1604 return util.hooks()
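
A short sketch of how an extension might use this hook point instead of overriding checkpush(); the extension name and the head-count policy are made up, and it assumes util.hooks exposes an add(source, func) method as used elsewhere in the codebase:

    # hypothetical extension snippet
    from mercurial import util

    def _checkheads(repo, remote, outgoing):
        # outgoing.missingheads lists the heads that would be created remotely
        if len(outgoing.missingheads) > 5:
            raise util.Abort('refusing to push more than 5 new heads')

    def reposetup(ui, repo):
        if repo.local():
            repo.prepushoutgoinghooks.add('headlimit', _checkheads)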
1605 1605
1606 1606 def stream_in(self, remote, requirements):
1607 1607 lock = self.lock()
1608 1608 try:
1609 1609 # Save remote branchmap. We will use it later
1610 1610 # to speed up branchcache creation
1611 1611 rbranchmap = None
1612 1612 if remote.capable("branchmap"):
1613 1613 rbranchmap = remote.branchmap()
1614 1614
1615 1615 fp = remote.stream_out()
1616 1616 l = fp.readline()
1617 1617 try:
1618 1618 resp = int(l)
1619 1619 except ValueError:
1620 1620 raise error.ResponseError(
1621 1621 _('unexpected response from remote server:'), l)
1622 1622 if resp == 1:
1623 1623 raise util.Abort(_('operation forbidden by server'))
1624 1624 elif resp == 2:
1625 1625 raise util.Abort(_('locking the remote repository failed'))
1626 1626 elif resp != 0:
1627 1627 raise util.Abort(_('the server sent an unknown error code'))
1628 1628 self.ui.status(_('streaming all changes\n'))
1629 1629 l = fp.readline()
1630 1630 try:
1631 1631 total_files, total_bytes = map(int, l.split(' ', 1))
1632 1632 except (ValueError, TypeError):
1633 1633 raise error.ResponseError(
1634 1634 _('unexpected response from remote server:'), l)
1635 1635 self.ui.status(_('%d files to transfer, %s of data\n') %
1636 1636 (total_files, util.bytecount(total_bytes)))
1637 1637 handled_bytes = 0
1638 1638 self.ui.progress(_('clone'), 0, total=total_bytes)
1639 1639 start = time.time()
1640 1640
1641 1641 tr = self.transaction(_('clone'))
1642 1642 try:
1643 1643 for i in xrange(total_files):
1644 1644 # XXX doesn't support '\n' or '\r' in filenames
1645 1645 l = fp.readline()
1646 1646 try:
1647 1647 name, size = l.split('\0', 1)
1648 1648 size = int(size)
1649 1649 except (ValueError, TypeError):
1650 1650 raise error.ResponseError(
1651 1651 _('unexpected response from remote server:'), l)
1652 1652 if self.ui.debugflag:
1653 1653 self.ui.debug('adding %s (%s)\n' %
1654 1654 (name, util.bytecount(size)))
1655 1655 # for backwards compat, name was partially encoded
1656 1656 ofp = self.sopener(store.decodedir(name), 'w')
1657 1657 for chunk in util.filechunkiter(fp, limit=size):
1658 1658 handled_bytes += len(chunk)
1659 1659 self.ui.progress(_('clone'), handled_bytes,
1660 1660 total=total_bytes)
1661 1661 ofp.write(chunk)
1662 1662 ofp.close()
1663 1663 tr.close()
1664 1664 finally:
1665 1665 tr.release()
1666 1666
1667 1667 # Writing straight to files circumvented the in-memory caches
1668 1668 self.invalidate()
1669 1669
1670 1670 elapsed = time.time() - start
1671 1671 if elapsed <= 0:
1672 1672 elapsed = 0.001
1673 1673 self.ui.progress(_('clone'), None)
1674 1674 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1675 1675 (util.bytecount(total_bytes), elapsed,
1676 1676 util.bytecount(total_bytes / elapsed)))
1677 1677
1678 1678 # new requirements = old non-format requirements +
1679 1679 # new format-related
1680 1680 # requirements from the streamed-in repository
1681 1681 requirements.update(set(self.requirements) - self.supportedformats)
1682 1682 self._applyrequirements(requirements)
1683 1683 self._writerequirements()
1684 1684
1685 1685 if rbranchmap:
1686 1686 rbheads = []
1687 1687 closed = []
1688 1688 for bheads in rbranchmap.itervalues():
1689 1689 rbheads.extend(bheads)
1690 1690 for h in bheads:
1691 1691 r = self.changelog.rev(h)
1692 1692 b, c = self.changelog.branchinfo(r)
1693 1693 if c:
1694 1694 closed.append(h)
1695 1695
1696 1696 if rbheads:
1697 1697 rtiprev = max((int(self.changelog.rev(node))
1698 1698 for node in rbheads))
1699 1699 cache = branchmap.branchcache(rbranchmap,
1700 1700 self[rtiprev].node(),
1701 1701 rtiprev,
1702 1702 closednodes=closed)
1703 1703 # Try to stick it as low as possible
1704 1704 # filters above 'served' are unlikely to be fetched from a clone
1705 1705 for candidate in ('base', 'immutable', 'served'):
1706 1706 rview = self.filtered(candidate)
1707 1707 if cache.validfor(rview):
1708 1708 self._branchcaches[candidate] = cache
1709 1709 cache.write(rview)
1710 1710 break
1711 1711 self.invalidate()
1712 1712 return len(self.heads()) + 1
1713 1713 finally:
1714 1714 lock.release()
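
For reference, the payload consumed above is line-oriented: a status code (0 ok, 1 operation forbidden, 2 remote lock failed), then a '<filecount> <totalbytes>' line, then for each file a '<name>\0<size>' line followed by exactly <size> bytes of raw store data. A minimal, hypothetical reader of that format (error handling, name decoding and progress reporting omitted):

    def readstream(fp):
        # sketch only; stream_in() above is the real consumer
        if int(fp.readline()) != 0:
            raise ValueError('streaming clone refused by server')
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        for _i in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            yield name, fp.read(int(size))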
1715 1715
1716 1716 def clone(self, remote, heads=[], stream=False):
1717 1717 '''clone remote repository.
1718 1718
1719 1719 keyword arguments:
1720 1720 heads: list of revs to clone (forces use of pull)
1721 1721 stream: use streaming clone if possible'''
1722 1722
1723 1723 # now, all clients that can request uncompressed clones can
1724 1724 # read repo formats supported by all servers that can serve
1725 1725 # them.
1726 1726
1727 1727 # if revlog format changes, client will have to check version
1728 1728 # and format flags on "stream" capability, and use
1729 1729 # uncompressed only if compatible.
1730 1730
1731 1731 if not stream:
1732 1732 # if the server explicitly prefers to stream (for fast LANs)
1733 1733 stream = remote.capable('stream-preferred')
1734 1734
1735 1735 if stream and not heads:
1736 1736 # 'stream' means remote revlog format is revlogv1 only
1737 1737 if remote.capable('stream'):
1738 1738 self.stream_in(remote, set(('revlogv1',)))
1739 1739 else:
1740 1740 # otherwise, 'streamreqs' contains the remote revlog format
1741 1741 streamreqs = remote.capable('streamreqs')
1742 1742 if streamreqs:
1743 1743 streamreqs = set(streamreqs.split(','))
1744 1744 # if we support it, stream in and adjust our requirements
1745 1745 if not streamreqs - self.supportedformats:
1746 1746 self.stream_in(remote, streamreqs)
1747 1747
1748 1748 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1749 1749 try:
1750 1750 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1751 1751 ret = exchange.pull(self, remote, heads).cgresult
1752 1752 finally:
1753 1753 self.ui.restoreconfig(quiet)
1754 1754 return ret
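
One practical consequence of the capability checks above: a client that did not ask for an uncompressed clone can still be steered into a streaming clone when the server advertises 'stream-preferred'. A hedged example of the server-side configuration that is believed to trigger this (standard [server] options, not part of this patch):

    [server]
    # allow streaming clones and advertise 'stream-preferred' to clients
    uncompressed = True
    preferuncompressed = True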
1755 1755
1756 1756 def pushkey(self, namespace, key, old, new):
1757 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1758 old=old, new=new)
1757 try:
1758 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1759 old=old, new=new)
1760 except error.HookAbort, exc:
1761 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1762 if exc.hint:
1763 self.ui.write_err(_("(%s)\n") % exc.hint)
1764 return False
1759 1765 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1760 1766 ret = pushkey.push(self, namespace, key, old, new)
1761 1767 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1762 1768 ret=ret)
1763 1769 return ret
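
The try/except added above is the heart of this changeset: a failing prepushkey hook now turns into a warning plus a False return value, so the surrounding push keeps going and exits with status 1 instead of aborting with HookAbort ([255]). A hedged sketch of the caller-side effect (the helper name and messages below are illustrative, not from the patch):

    def exportbookmark(ui, remote, name, old, new):
        # mirrors how a bookmark export consumes pushkey()'s boolean result
        if remote.pushkey('bookmarks', name, old, new):
            ui.status('exporting bookmark %s\n' % name)
            return 0
        # with this change a vetoing prepushkey hook lands here
        ui.warn('exporting bookmark %s failed!\n' % name)
        return 1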
1764 1770
1765 1771 def listkeys(self, namespace):
1766 1772 self.hook('prelistkeys', throw=True, namespace=namespace)
1767 1773 self.ui.debug('listing keys for "%s"\n' % namespace)
1768 1774 values = pushkey.list(self, namespace)
1769 1775 self.hook('listkeys', namespace=namespace, values=values)
1770 1776 return values
1771 1777
1772 1778 def debugwireargs(self, one, two, three=None, four=None, five=None):
1773 1779 '''used to test argument passing over the wire'''
1774 1780 return "%s %s %s %s %s" % (one, two, three, four, five)
1775 1781
1776 1782 def savecommitmessage(self, text):
1777 1783 fp = self.opener('last-message.txt', 'wb')
1778 1784 try:
1779 1785 fp.write(text)
1780 1786 finally:
1781 1787 fp.close()
1782 1788 return self.pathto(fp.name[len(self.root) + 1:])
1783 1789
1784 1790 # used to avoid circular references so destructors work
1785 1791 def aftertrans(files):
1786 1792 renamefiles = [tuple(t) for t in files]
1787 1793 def a():
1788 1794 for vfs, src, dest in renamefiles:
1789 1795 try:
1790 1796 vfs.rename(src, dest)
1791 1797 except OSError: # journal file does not yet exist
1792 1798 pass
1793 1799 return a
1794 1800
1795 1801 def undoname(fn):
1796 1802 base, name = os.path.split(fn)
1797 1803 assert name.startswith('journal')
1798 1804 return os.path.join(base, name.replace('journal', 'undo', 1))
1799 1805
1800 1806 def instance(ui, path, create):
1801 1807 return localrepository(ui, util.urllocalpath(path), create)
1802 1808
1803 1809 def islocal(path):
1804 1810 return True
@@ -1,489 +1,562 b''
1 1 #require serve
2 2
3 3 $ cat << EOF >> $HGRCPATH
4 4 > [ui]
5 5 > logtemplate={rev}:{node|short} {desc|firstline}
6 6 > [phases]
7 7 > publish=False
8 8 > [experimental]
9 9 > evolution=createmarkers,exchange
10 10 > EOF
11 11
12 12 initialize
13 13
14 14 $ hg init a
15 15 $ cd a
16 16 $ echo 'test' > test
17 17 $ hg commit -Am'test'
18 18 adding test
19 19
20 20 set bookmarks
21 21
22 22 $ hg bookmark X
23 23 $ hg bookmark Y
24 24 $ hg bookmark Z
25 25
26 26 import bookmark by name
27 27
28 28 $ hg init ../b
29 29 $ cd ../b
30 30 $ hg book Y
31 31 $ hg book
32 32 * Y -1:000000000000
33 33 $ hg pull ../a
34 34 pulling from ../a
35 35 requesting all changes
36 36 adding changesets
37 37 adding manifests
38 38 adding file changes
39 39 added 1 changesets with 1 changes to 1 files
40 40 adding remote bookmark X
41 41 updating bookmark Y
42 42 adding remote bookmark Z
43 43 (run 'hg update' to get a working copy)
44 44 $ hg bookmarks
45 45 X 0:4e3505fd9583
46 46 * Y 0:4e3505fd9583
47 47 Z 0:4e3505fd9583
48 48 $ hg debugpushkey ../a namespaces
49 49 bookmarks
50 50 namespaces
51 51 obsolete
52 52 phases
53 53 $ hg debugpushkey ../a bookmarks
54 54 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
55 55 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
56 56 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
57 57
58 58 delete the bookmark to repull it
59 59
60 60 $ hg book -d X
61 61 $ hg pull -B X ../a
62 62 pulling from ../a
63 63 no changes found
64 64 adding remote bookmark X
65 65
66 66 finally no-op pull
67 67
68 68 $ hg pull -B X ../a
69 69 pulling from ../a
70 70 no changes found
71 71 $ hg bookmark
72 72 X 0:4e3505fd9583
73 73 * Y 0:4e3505fd9583
74 74 Z 0:4e3505fd9583
75 75
76 76 export bookmark by name
77 77
78 78 $ hg bookmark W
79 79 $ hg bookmark foo
80 80 $ hg bookmark foobar
81 81 $ hg push -B W ../a
82 82 pushing to ../a
83 83 searching for changes
84 84 no changes found
85 85 exporting bookmark W
86 86 [1]
87 87 $ hg -R ../a bookmarks
88 88 W -1:000000000000
89 89 X 0:4e3505fd9583
90 90 Y 0:4e3505fd9583
91 91 * Z 0:4e3505fd9583
92 92
93 93 delete a remote bookmark
94 94
95 95 $ hg book -d W
96 96 $ hg push -B W ../a
97 97 pushing to ../a
98 98 searching for changes
99 99 no changes found
100 100 deleting remote bookmark W
101 101 [1]
102 102
103 103 push/pull name that doesn't exist
104 104
105 105 $ hg push -B badname ../a
106 106 pushing to ../a
107 107 searching for changes
108 108 bookmark badname does not exist on the local or remote repository!
109 109 no changes found
110 110 [2]
111 111 $ hg pull -B anotherbadname ../a
112 112 pulling from ../a
113 113 abort: remote bookmark anotherbadname not found!
114 114 [255]
115 115
116 116 divergent bookmarks
117 117
118 118 $ cd ../a
119 119 $ echo c1 > f1
120 120 $ hg ci -Am1
121 121 adding f1
122 122 $ hg book -f @
123 123 $ hg book -f X
124 124 $ hg book
125 125 @ 1:0d2164f0ce0d
126 126 * X 1:0d2164f0ce0d
127 127 Y 0:4e3505fd9583
128 128 Z 1:0d2164f0ce0d
129 129
130 130 $ cd ../b
131 131 $ hg up
132 132 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
133 133 updating bookmark foobar
134 134 $ echo c2 > f2
135 135 $ hg ci -Am2
136 136 adding f2
137 137 $ hg book -if @
138 138 $ hg book -if X
139 139 $ hg book
140 140 @ 1:9b140be10808
141 141 X 1:9b140be10808
142 142 Y 0:4e3505fd9583
143 143 Z 0:4e3505fd9583
144 144 foo -1:000000000000
145 145 * foobar 1:9b140be10808
146 146
147 147 $ hg pull --config paths.foo=../a foo
148 148 pulling from $TESTTMP/a (glob)
149 149 searching for changes
150 150 adding changesets
151 151 adding manifests
152 152 adding file changes
153 153 added 1 changesets with 1 changes to 1 files (+1 heads)
154 154 divergent bookmark @ stored as @foo
155 155 divergent bookmark X stored as X@foo
156 156 updating bookmark Z
157 157 (run 'hg heads' to see heads, 'hg merge' to merge)
158 158 $ hg book
159 159 @ 1:9b140be10808
160 160 @foo 2:0d2164f0ce0d
161 161 X 1:9b140be10808
162 162 X@foo 2:0d2164f0ce0d
163 163 Y 0:4e3505fd9583
164 164 Z 2:0d2164f0ce0d
165 165 foo -1:000000000000
166 166 * foobar 1:9b140be10808
167 167 $ hg push -f ../a
168 168 pushing to ../a
169 169 searching for changes
170 170 adding changesets
171 171 adding manifests
172 172 adding file changes
173 173 added 1 changesets with 1 changes to 1 files (+1 heads)
174 174 $ hg -R ../a book
175 175 @ 1:0d2164f0ce0d
176 176 * X 1:0d2164f0ce0d
177 177 Y 0:4e3505fd9583
178 178 Z 1:0d2164f0ce0d
179 179
180 180 explicit pull should overwrite the local version (issue4439)
181 181
182 182 $ hg pull --config paths.foo=../a foo -B X
183 183 pulling from $TESTTMP/a (glob)
184 184 no changes found
185 185 divergent bookmark @ stored as @foo
186 186 importing bookmark X
187 187
188 188 reinstall state for further testing:
189 189
190 190 $ hg book -fr 9b140be10808 X
191 191
192 192 revsets should not ignore divergent bookmarks
193 193
194 194 $ hg bookmark -fr 1 Z
195 195 $ hg log -r 'bookmark()' --template '{rev}:{node|short} {bookmarks}\n'
196 196 0:4e3505fd9583 Y
197 197 1:9b140be10808 @ X Z foobar
198 198 2:0d2164f0ce0d @foo X@foo
199 199 $ hg log -r 'bookmark("X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
200 200 2:0d2164f0ce0d @foo X@foo
201 201 $ hg log -r 'bookmark("re:X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
202 202 2:0d2164f0ce0d @foo X@foo
203 203
204 204 update a remote bookmark from a non-head to a head
205 205
206 206 $ hg up -q Y
207 207 $ echo c3 > f2
208 208 $ hg ci -Am3
209 209 adding f2
210 210 created new head
211 211 $ hg push ../a
212 212 pushing to ../a
213 213 searching for changes
214 214 adding changesets
215 215 adding manifests
216 216 adding file changes
217 217 added 1 changesets with 1 changes to 1 files (+1 heads)
218 218 updating bookmark Y
219 219 $ hg -R ../a book
220 220 @ 1:0d2164f0ce0d
221 221 * X 1:0d2164f0ce0d
222 222 Y 3:f6fc62dde3c0
223 223 Z 1:0d2164f0ce0d
224 224
225 225 update a bookmark in the middle of a client pulling changes
226 226
227 227 $ cd ..
228 228 $ hg clone -q a pull-race
229 229 $ hg clone -q pull-race pull-race2
230 230 $ cd pull-race
231 231 $ hg up -q Y
232 232 $ echo c4 > f2
233 233 $ hg ci -Am4
234 234 $ echo c5 > f3
235 235 $ cat <<EOF > .hg/hgrc
236 236 > [hooks]
237 237 > outgoing.makecommit = hg ci -Am5; echo committed in pull-race
238 238 > EOF
239 239 $ cd ../pull-race2
240 240 $ hg pull
241 241 pulling from $TESTTMP/pull-race (glob)
242 242 searching for changes
243 243 adding changesets
244 244 adding f3
245 245 committed in pull-race
246 246 adding manifests
247 247 adding file changes
248 248 added 1 changesets with 1 changes to 1 files
249 249 updating bookmark Y
250 250 (run 'hg update' to get a working copy)
251 251 $ hg book
252 252 * @ 1:0d2164f0ce0d
253 253 X 1:0d2164f0ce0d
254 254 Y 4:b0a5eff05604
255 255 Z 1:0d2164f0ce0d
256 256 $ cd ../b
257 257
258 258 diverging a remote bookmark fails
259 259
260 260 $ hg up -q 4e3505fd9583
261 261 $ echo c4 > f2
262 262 $ hg ci -Am4
263 263 adding f2
264 264 created new head
265 265 $ echo c5 > f2
266 266 $ hg ci -Am5
267 267 $ hg log -G
268 268 @ 5:c922c0139ca0 5
269 269 |
270 270 o 4:4efff6d98829 4
271 271 |
272 272 | o 3:f6fc62dde3c0 3
273 273 |/
274 274 | o 2:0d2164f0ce0d 1
275 275 |/
276 276 | o 1:9b140be10808 2
277 277 |/
278 278 o 0:4e3505fd9583 test
279 279
280 280
281 281 $ hg book -f Y
282 282
283 283 $ cat <<EOF > ../a/.hg/hgrc
284 284 > [web]
285 285 > push_ssl = false
286 286 > allow_push = *
287 287 > EOF
288 288
289 289 $ hg -R ../a serve -p $HGPORT2 -d --pid-file=../hg2.pid
290 290 $ cat ../hg2.pid >> $DAEMON_PIDS
291 291
292 292 $ hg push http://localhost:$HGPORT2/
293 293 pushing to http://localhost:$HGPORT2/
294 294 searching for changes
295 295 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
296 296 (merge or see "hg help push" for details about pushing new heads)
297 297 [255]
298 298 $ hg -R ../a book
299 299 @ 1:0d2164f0ce0d
300 300 * X 1:0d2164f0ce0d
301 301 Y 3:f6fc62dde3c0
302 302 Z 1:0d2164f0ce0d
303 303
304 304
305 305 Unrelated marker does not alter the decision
306 306
307 307 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
308 308 $ hg push http://localhost:$HGPORT2/
309 309 pushing to http://localhost:$HGPORT2/
310 310 searching for changes
311 311 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
312 312 (merge or see "hg help push" for details about pushing new heads)
313 313 [255]
314 314 $ hg -R ../a book
315 315 @ 1:0d2164f0ce0d
316 316 * X 1:0d2164f0ce0d
317 317 Y 3:f6fc62dde3c0
318 318 Z 1:0d2164f0ce0d
319 319
320 320 Update to a successor works
321 321
322 322 $ hg id --debug -r 3
323 323 f6fc62dde3c0771e29704af56ba4d8af77abcc2f
324 324 $ hg id --debug -r 4
325 325 4efff6d98829d9c824c621afd6e3f01865f5439f
326 326 $ hg id --debug -r 5
327 327 c922c0139ca03858f655e4a2af4dd02796a63969 tip Y
328 328 $ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f cccccccccccccccccccccccccccccccccccccccc
329 329 $ hg debugobsolete cccccccccccccccccccccccccccccccccccccccc 4efff6d98829d9c824c621afd6e3f01865f5439f
330 330 $ hg push http://localhost:$HGPORT2/
331 331 pushing to http://localhost:$HGPORT2/
332 332 searching for changes
333 333 remote: adding changesets
334 334 remote: adding manifests
335 335 remote: adding file changes
336 336 remote: added 2 changesets with 2 changes to 1 files (+1 heads)
337 337 updating bookmark Y
338 338 $ hg -R ../a book
339 339 @ 1:0d2164f0ce0d
340 340 * X 1:0d2164f0ce0d
341 341 Y 5:c922c0139ca0
342 342 Z 1:0d2164f0ce0d
343 343
344 344 hgweb
345 345
346 346 $ cat <<EOF > .hg/hgrc
347 347 > [web]
348 348 > push_ssl = false
349 349 > allow_push = *
350 350 > EOF
351 351
352 352 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
353 353 $ cat ../hg.pid >> $DAEMON_PIDS
354 354 $ cd ../a
355 355
356 356 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
357 357 bookmarks
358 358 namespaces
359 359 obsolete
360 360 phases
361 361 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
362 362 @ 9b140be1080824d768c5a4691a564088eede71f9
363 363 X 9b140be1080824d768c5a4691a564088eede71f9
364 364 Y c922c0139ca03858f655e4a2af4dd02796a63969
365 365 Z 9b140be1080824d768c5a4691a564088eede71f9
366 366 foo 0000000000000000000000000000000000000000
367 367 foobar 9b140be1080824d768c5a4691a564088eede71f9
368 368 $ hg out -B http://localhost:$HGPORT/
369 369 comparing with http://localhost:$HGPORT/
370 370 searching for changed bookmarks
371 371 no changed bookmarks found
372 372 [1]
373 373 $ hg push -B Z http://localhost:$HGPORT/
374 374 pushing to http://localhost:$HGPORT/
375 375 searching for changes
376 376 no changes found
377 377 updating bookmark Z
378 378 [1]
379 379 $ hg book -d Z
380 380 $ hg in -B http://localhost:$HGPORT/
381 381 comparing with http://localhost:$HGPORT/
382 382 searching for changed bookmarks
383 383 Z 0d2164f0ce0d
384 384 foo 000000000000
385 385 foobar 9b140be10808
386 386 $ hg pull -B Z http://localhost:$HGPORT/
387 387 pulling from http://localhost:$HGPORT/
388 388 no changes found
389 389 divergent bookmark @ stored as @1
390 390 divergent bookmark X stored as X@1
391 391 adding remote bookmark Z
392 392 adding remote bookmark foo
393 393 adding remote bookmark foobar
394 394 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
395 395 requesting all changes
396 396 adding changesets
397 397 adding manifests
398 398 adding file changes
399 399 added 5 changesets with 5 changes to 3 files (+2 heads)
400 400 updating to bookmark @
401 401 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
402 402 $ hg -R cloned-bookmarks bookmarks
403 403 * @ 1:9b140be10808
404 404 X 1:9b140be10808
405 405 Y 4:c922c0139ca0
406 406 Z 2:0d2164f0ce0d
407 407 foo -1:000000000000
408 408 foobar 1:9b140be10808
409 409
410 410 $ cd ..
411 411
412 412 Pushing a bookmark should only push the changes required by that
413 413 bookmark, not all outgoing changes:
414 414 $ hg clone http://localhost:$HGPORT/ addmarks
415 415 requesting all changes
416 416 adding changesets
417 417 adding manifests
418 418 adding file changes
419 419 added 5 changesets with 5 changes to 3 files (+2 heads)
420 420 updating to bookmark @
421 421 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
422 422 $ cd addmarks
423 423 $ echo foo > foo
424 424 $ hg add foo
425 425 $ hg commit -m 'add foo'
426 426 $ echo bar > bar
427 427 $ hg add bar
428 428 $ hg commit -m 'add bar'
429 429 $ hg co "tip^"
430 430 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
431 431 (leaving bookmark @)
432 432 $ hg book add-foo
433 433 $ hg book -r tip add-bar
434 434 Note: this push *must* push only a single changeset, as that's the point
435 435 of this test.
436 436 $ hg push -B add-foo --traceback
437 437 pushing to http://localhost:$HGPORT/
438 438 searching for changes
439 439 remote: adding changesets
440 440 remote: adding manifests
441 441 remote: adding file changes
442 442 remote: added 1 changesets with 1 changes to 1 files
443 443 exporting bookmark add-foo
444 444
445 445 pushing a new bookmark on a new head does not require -f if -B is specified
446 446
447 447 $ hg up -q X
448 448 $ hg book W
449 449 $ echo c5 > f2
450 450 $ hg ci -Am5
451 451 created new head
452 452 $ hg push -B W
453 453 pushing to http://localhost:$HGPORT/
454 454 searching for changes
455 455 remote: adding changesets
456 456 remote: adding manifests
457 457 remote: adding file changes
458 458 remote: added 1 changesets with 1 changes to 1 files (+1 heads)
459 459 exporting bookmark W
460 460 $ hg -R ../b id -r W
461 461 cc978a373a53 tip W
462 462
463 463 $ cd ..
464 464
465 465 pushing an unchanged bookmark should result in no changes
466 466
467 467 $ hg init unchanged-a
468 468 $ hg init unchanged-b
469 469 $ cd unchanged-a
470 470 $ echo initial > foo
471 471 $ hg commit -A -m initial
472 472 adding foo
473 473 $ hg bookmark @
474 474 $ hg push -B @ ../unchanged-b
475 475 pushing to ../unchanged-b
476 476 searching for changes
477 477 adding changesets
478 478 adding manifests
479 479 adding file changes
480 480 added 1 changesets with 1 changes to 1 files
481 481 exporting bookmark @
482 482
483 483 $ hg push -B @ ../unchanged-b
484 484 pushing to ../unchanged-b
485 485 searching for changes
486 486 no changes found
487 487 [1]
488 488
489 $ cd ..
489
490 Check hook preventing push (issue4455)
491 ======================================
492
493 $ hg bookmarks
494 * @ 0:55482a6fb4b1
495 $ hg log -G
496 @ 0:55482a6fb4b1 initial
497
498 $ hg init ../issue4455-dest
499 $ hg push ../issue4455-dest # changesets only
500 pushing to ../issue4455-dest
501 searching for changes
502 adding changesets
503 adding manifests
504 adding file changes
505 added 1 changesets with 1 changes to 1 files
506 $ cat >> .hg/hgrc << EOF
507 > [paths]
508 > local=../issue4455-dest/
509 > ssh=ssh://user@dummy/issue4455-dest
510 > http=http://localhost:$HGPORT/
511 > [ui]
512 > ssh=python "$TESTDIR/dummyssh"
513 > EOF
514 $ cat >> ../issue4455-dest/.hg/hgrc << EOF
515 > [hooks]
516 > prepushkey=false
517 > [web]
518 > push_ssl = false
519 > allow_push = *
520 > EOF
521 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
522 $ hg -R ../issue4455-dest serve -p $HGPORT -d --pid-file=../issue4455.pid -E ../issue4455-error.log
523 $ cat ../issue4455.pid >> $DAEMON_PIDS
524
525 Local push
526 ----------
527
528 $ hg push -B @ local
529 pushing to $TESTTMP/issue4455-dest (glob)
530 searching for changes
531 no changes found
532 pushkey-abort: prepushkey hook exited with status 1
533 exporting bookmark @ failed!
534 [1]
535 $ hg -R ../issue4455-dest/ bookmarks
536 no bookmarks set
537
538 Using ssh
539 ---------
540
541 $ hg push -B @ ssh
542 pushing to ssh://user@dummy/issue4455-dest
543 searching for changes
544 no changes found
545 remote: pushkey-abort: prepushkey hook exited with status 1
546 exporting bookmark @ failed!
547 [1]
548 $ hg -R ../issue4455-dest/ bookmarks
549 no bookmarks set
550
551 Using http
552 ----------
553
554 $ hg push -B @ http
555 pushing to http://localhost:$HGPORT/
556 searching for changes
557 no changes found
558 remote: pushkey-abort: prepushkey hook exited with status 1
559 exporting bookmark @ failed!
560 [1]
561 $ hg -R ../issue4455-dest/ bookmarks
562 no bookmarks set
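
The three scenarios above drive the failure with the shell hook 'prepushkey=false'; an in-process Python hook can veto keys just as gracefully. A hypothetical server-side example (the hook body and policy are invented, only the hook calling convention is assumed):

    # hypothetical serverhooks.py on the destination side
    def denybookmarkdelete(ui, repo, namespace=None, key=None,
                           old=None, new=None, **kwargs):
        # refuse bookmark deletions, allow everything else; a truthy
        # return value marks a Python hook as failed
        if namespace == 'bookmarks' and not new:
            ui.warn('bookmark deletion is disabled on this server\n')
            return True
        return False

wired up with something like 'prepushkey.nodelete = python:serverhooks.py:denybookmarkdelete' in the destination repository's [hooks] section; with this changeset such a veto should show up client-side as a pushkey-abort warning and exit status 1 rather than a hard abort.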
@@ -1,643 +1,644 b''
1 1 commit hooks can see env vars
2 2
3 3 $ hg init a
4 4 $ cd a
5 5 $ cat > .hg/hgrc <<EOF
6 6 > [hooks]
7 7 > commit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit"
8 8 > commit.b = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit.b"
9 9 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= python \"$TESTDIR/printenv.py\" precommit"
10 10 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" pretxncommit"
11 11 > pretxncommit.tip = hg -q tip
12 12 > pre-identify = python "$TESTDIR/printenv.py" pre-identify 1
13 13 > pre-cat = python "$TESTDIR/printenv.py" pre-cat
14 14 > post-cat = python "$TESTDIR/printenv.py" post-cat
15 15 > EOF
16 16 $ echo a > a
17 17 $ hg add a
18 18 $ hg commit -m a
19 19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
20 20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
21 21 0:cb9a9f314b8b
22 22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
24 24
25 25 $ hg clone . ../b
26 26 updating to branch default
27 27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 28 $ cd ../b
29 29
30 30 changegroup hooks can see env vars
31 31
32 32 $ cat > .hg/hgrc <<EOF
33 33 > [hooks]
34 34 > prechangegroup = python "$TESTDIR/printenv.py" prechangegroup
35 35 > changegroup = python "$TESTDIR/printenv.py" changegroup
36 36 > incoming = python "$TESTDIR/printenv.py" incoming
37 37 > EOF
38 38
39 39 pretxncommit and commit hooks can see both parents of merge
40 40
41 41 $ cd ../a
42 42 $ echo b >> a
43 43 $ hg commit -m a1 -d "1 0"
44 44 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
45 45 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
46 46 1:ab228980c14d
47 47 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
48 48 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
49 49 $ hg update -C 0
50 50 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
51 51 $ echo b > b
52 52 $ hg add b
53 53 $ hg commit -m b -d '1 0'
54 54 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
55 55 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
56 56 2:ee9deb46ab31
57 57 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
58 58 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
59 59 created new head
60 60 $ hg merge 1
61 61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 62 (branch merge, don't forget to commit)
63 63 $ hg commit -m merge -d '2 0'
64 64 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
65 65 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
66 66 3:07f3376c1e65
67 67 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
68 68 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
69 69
70 70 test generic hooks
71 71
72 72 $ hg id
73 73 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
74 74 abort: pre-identify hook exited with status 1
75 75 [255]
76 76 $ hg cat b
77 77 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
78 78 b
79 79 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
80 80
81 81 $ cd ../b
82 82 $ hg pull ../a
83 83 pulling from ../a
84 84 searching for changes
85 85 prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
86 86 adding changesets
87 87 adding manifests
88 88 adding file changes
89 89 added 3 changesets with 2 changes to 2 files
90 90 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
91 91 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
92 92 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
93 93 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
94 94 (run 'hg update' to get a working copy)
95 95
96 96 tag hooks can see env vars
97 97
98 98 $ cd ../a
99 99 $ cat >> .hg/hgrc <<EOF
100 100 > pretag = python "$TESTDIR/printenv.py" pretag
101 101 > tag = sh -c "HG_PARENT1= HG_PARENT2= python \"$TESTDIR/printenv.py\" tag"
102 102 > EOF
103 103 $ hg tag -d '3 0' a
104 104 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
105 105 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
106 106 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
107 107 4:539e4b31b6dc
108 108 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
109 109 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
110 110 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
111 111 $ hg tag -l la
112 112 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
113 113 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
114 114
115 115 pretag hook can forbid tagging
116 116
117 117 $ echo "pretag.forbid = python \"$TESTDIR/printenv.py\" pretag.forbid 1" >> .hg/hgrc
118 118 $ hg tag -d '4 0' fa
119 119 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
120 120 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
121 121 abort: pretag.forbid hook exited with status 1
122 122 [255]
123 123 $ hg tag -l fla
124 124 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
125 125 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
126 126 abort: pretag.forbid hook exited with status 1
127 127 [255]
128 128
129 129 pretxncommit hook can see changeset, can roll back txn, changeset is no
130 130 longer there afterwards
131 131
132 132 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
133 133 $ echo "pretxncommit.forbid1 = python \"$TESTDIR/printenv.py\" pretxncommit.forbid 1" >> .hg/hgrc
134 134 $ echo z > z
135 135 $ hg add z
136 136 $ hg -q tip
137 137 4:539e4b31b6dc
138 138 $ hg commit -m 'fail' -d '4 0'
139 139 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
140 140 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
141 141 5:6f611f8018c1
142 142 5:6f611f8018c1
143 143 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
144 144 transaction abort!
145 145 rollback completed
146 146 abort: pretxncommit.forbid1 hook exited with status 1
147 147 [255]
148 148 $ hg -q tip
149 149 4:539e4b31b6dc
150 150
151 151 precommit hook can prevent commit
152 152
153 153 $ echo "precommit.forbid = python \"$TESTDIR/printenv.py\" precommit.forbid 1" >> .hg/hgrc
154 154 $ hg commit -m 'fail' -d '4 0'
155 155 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
156 156 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
157 157 abort: precommit.forbid hook exited with status 1
158 158 [255]
159 159 $ hg -q tip
160 160 4:539e4b31b6dc
161 161
162 162 preupdate hook can prevent update
163 163
164 164 $ echo "preupdate = python \"$TESTDIR/printenv.py\" preupdate" >> .hg/hgrc
165 165 $ hg update 1
166 166 preupdate hook: HG_PARENT1=ab228980c14d
167 167 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
168 168
169 169 update hook
170 170
171 171 $ echo "update = python \"$TESTDIR/printenv.py\" update" >> .hg/hgrc
172 172 $ hg update
173 173 preupdate hook: HG_PARENT1=539e4b31b6dc
174 174 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
175 175 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
176 176
177 177 pushkey hook
178 178
179 179 $ echo "pushkey = python \"$TESTDIR/printenv.py\" pushkey" >> .hg/hgrc
180 180 $ cd ../b
181 181 $ hg bookmark -r null foo
182 182 $ hg push -B foo ../a
183 183 pushing to ../a
184 184 searching for changes
185 185 no changes found
186 186 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
187 187 exporting bookmark foo
188 188 [1]
189 189 $ cd ../a
190 190
191 191 listkeys hook
192 192
193 193 $ echo "listkeys = python \"$TESTDIR/printenv.py\" listkeys" >> .hg/hgrc
194 194 $ hg bookmark -r null bar
195 195 $ cd ../b
196 196 $ hg pull -B bar ../a
197 197 pulling from ../a
198 198 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
199 199 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
200 200 no changes found
201 201 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
202 202 adding remote bookmark bar
203 203 $ cd ../a
204 204
205 205 test that prepushkey can prevent incoming keys
206 206
207 207 $ echo "prepushkey = python \"$TESTDIR/printenv.py\" prepushkey.forbid 1" >> .hg/hgrc
208 208 $ cd ../b
209 209 $ hg bookmark -r null baz
210 210 $ hg push -B baz ../a
211 211 pushing to ../a
212 212 searching for changes
213 213 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
214 214 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
215 215 no changes found
216 216 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
217 217 prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
218 abort: prepushkey hook exited with status 1
219 [255]
218 pushkey-abort: prepushkey hook exited with status 1
219 exporting bookmark baz failed!
220 [1]
220 221 $ cd ../a
221 222
222 223 test that prelistkeys can prevent listing keys
223 224
224 225 $ echo "prelistkeys = python \"$TESTDIR/printenv.py\" prelistkeys.forbid 1" >> .hg/hgrc
225 226 $ hg bookmark -r null quux
226 227 $ cd ../b
227 228 $ hg pull -B quux ../a
228 229 pulling from ../a
229 230 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
230 231 abort: prelistkeys hook exited with status 1
231 232 [255]
232 233 $ cd ../a
233 234 $ rm .hg/hgrc
234 235
235 236 prechangegroup hook can prevent incoming changes
236 237
237 238 $ cd ../b
238 239 $ hg -q tip
239 240 3:07f3376c1e65
240 241 $ cat > .hg/hgrc <<EOF
241 242 > [hooks]
242 243 > prechangegroup.forbid = python "$TESTDIR/printenv.py" prechangegroup.forbid 1
243 244 > EOF
244 245 $ hg pull ../a
245 246 pulling from ../a
246 247 searching for changes
247 248 prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
248 249 abort: prechangegroup.forbid hook exited with status 1
249 250 [255]
250 251
251 252 pretxnchangegroup hook can see incoming changes, can roll back txn,
252 253 incoming changes are no longer there afterwards
253 254
254 255 $ cat > .hg/hgrc <<EOF
255 256 > [hooks]
256 257 > pretxnchangegroup.forbid0 = hg tip -q
257 258 > pretxnchangegroup.forbid1 = python "$TESTDIR/printenv.py" pretxnchangegroup.forbid 1
258 259 > EOF
259 260 $ hg pull ../a
260 261 pulling from ../a
261 262 searching for changes
262 263 adding changesets
263 264 adding manifests
264 265 adding file changes
265 266 added 1 changesets with 1 changes to 1 files
266 267 4:539e4b31b6dc
267 268 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
268 269 transaction abort!
269 270 rollback completed
270 271 abort: pretxnchangegroup.forbid1 hook exited with status 1
271 272 [255]
272 273 $ hg -q tip
273 274 3:07f3376c1e65
274 275
275 276 outgoing hooks can see env vars
276 277
277 278 $ rm .hg/hgrc
278 279 $ cat > ../a/.hg/hgrc <<EOF
279 280 > [hooks]
280 281 > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
281 282 > outgoing = python "$TESTDIR/printenv.py" outgoing
282 283 > EOF
283 284 $ hg pull ../a
284 285 pulling from ../a
285 286 searching for changes
286 287 preoutgoing hook: HG_SOURCE=pull
287 288 adding changesets
288 289 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
289 290 adding manifests
290 291 adding file changes
291 292 added 1 changesets with 1 changes to 1 files
292 293 adding remote bookmark quux
293 294 (run 'hg update' to get a working copy)
294 295 $ hg rollback
295 296 repository tip rolled back to revision 3 (undo pull)
296 297
297 298 preoutgoing hook can prevent outgoing changes
298 299
299 300 $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> ../a/.hg/hgrc
300 301 $ hg pull ../a
301 302 pulling from ../a
302 303 searching for changes
303 304 preoutgoing hook: HG_SOURCE=pull
304 305 preoutgoing.forbid hook: HG_SOURCE=pull
305 306 abort: preoutgoing.forbid hook exited with status 1
306 307 [255]
307 308
308 309 outgoing hooks work for local clones
309 310
310 311 $ cd ..
311 312 $ cat > a/.hg/hgrc <<EOF
312 313 > [hooks]
313 314 > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
314 315 > outgoing = python "$TESTDIR/printenv.py" outgoing
315 316 > EOF
316 317 $ hg clone a c
317 318 preoutgoing hook: HG_SOURCE=clone
318 319 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
319 320 updating to branch default
320 321 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
321 322 $ rm -rf c
322 323
323 324 preoutgoing hook can prevent outgoing changes for local clones
324 325
325 326 $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> a/.hg/hgrc
326 327 $ hg clone a zzz
327 328 preoutgoing hook: HG_SOURCE=clone
328 329 preoutgoing.forbid hook: HG_SOURCE=clone
329 330 abort: preoutgoing.forbid hook exited with status 1
330 331 [255]
331 332
332 333 $ cd "$TESTTMP/b"
333 334
334 335 $ cat > hooktests.py <<EOF
335 336 > from mercurial import util
336 337 >
337 338 > uncallable = 0
338 339 >
339 340 > def printargs(args):
340 341 > args.pop('ui', None)
341 342 > args.pop('repo', None)
342 343 > a = list(args.items())
343 344 > a.sort()
344 345 > print 'hook args:'
345 346 > for k, v in a:
346 347 > print ' ', k, v
347 348 >
348 349 > def passhook(**args):
349 350 > printargs(args)
350 351 >
351 352 > def failhook(**args):
352 353 > printargs(args)
353 354 > return True
354 355 >
355 356 > class LocalException(Exception):
356 357 > pass
357 358 >
358 359 > def raisehook(**args):
359 360 > raise LocalException('exception from hook')
360 361 >
361 362 > def aborthook(**args):
362 363 > raise util.Abort('raise abort from hook')
363 364 >
364 365 > def brokenhook(**args):
365 366 > return 1 + {}
366 367 >
367 368 > def verbosehook(ui, **args):
368 369 > ui.note('verbose output from hook\n')
369 370 >
370 371 > def printtags(ui, repo, **args):
371 372 > print sorted(repo.tags())
372 373 >
373 374 > class container:
374 375 > unreachable = 1
375 376 > EOF
376 377
377 378 test python hooks
378 379
379 380 #if windows
380 381 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
381 382 #else
382 383 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
383 384 #endif
384 385 $ export PYTHONPATH
385 386
386 387 $ echo '[hooks]' > ../a/.hg/hgrc
387 388 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
388 389 $ hg pull ../a 2>&1 | grep 'raised an exception'
389 390 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
390 391
391 392 $ echo '[hooks]' > ../a/.hg/hgrc
392 393 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
393 394 $ hg pull ../a 2>&1 | grep 'raised an exception'
394 395 error: preoutgoing.raise hook raised an exception: exception from hook
395 396
396 397 $ echo '[hooks]' > ../a/.hg/hgrc
397 398 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
398 399 $ hg pull ../a
399 400 pulling from ../a
400 401 searching for changes
401 402 error: preoutgoing.abort hook failed: raise abort from hook
402 403 abort: raise abort from hook
403 404 [255]
404 405
405 406 $ echo '[hooks]' > ../a/.hg/hgrc
406 407 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
407 408 $ hg pull ../a
408 409 pulling from ../a
409 410 searching for changes
410 411 hook args:
411 412 hooktype preoutgoing
412 413 source pull
413 414 abort: preoutgoing.fail hook failed
414 415 [255]
415 416
416 417 $ echo '[hooks]' > ../a/.hg/hgrc
417 418 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
418 419 $ hg pull ../a
419 420 pulling from ../a
420 421 searching for changes
421 422 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
422 423 [255]
423 424
424 425 $ echo '[hooks]' > ../a/.hg/hgrc
425 426 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
426 427 $ hg pull ../a
427 428 pulling from ../a
428 429 searching for changes
429 430 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
430 431 [255]
431 432
432 433 $ echo '[hooks]' > ../a/.hg/hgrc
433 434 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
434 435 $ hg pull ../a
435 436 pulling from ../a
436 437 searching for changes
437 438 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
438 439 [255]
439 440
440 441 $ echo '[hooks]' > ../a/.hg/hgrc
441 442 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
442 443 $ hg pull ../a
443 444 pulling from ../a
444 445 searching for changes
445 446 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
446 447 [255]
447 448
448 449 $ echo '[hooks]' > ../a/.hg/hgrc
449 450 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
450 451 $ hg pull ../a
451 452 pulling from ../a
452 453 searching for changes
453 454 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
454 455 [255]
455 456
456 457 $ echo '[hooks]' > ../a/.hg/hgrc
457 458 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
458 459 $ hg pull ../a
459 460 pulling from ../a
460 461 searching for changes
461 462 hook args:
462 463 hooktype preoutgoing
463 464 source pull
464 465 adding changesets
465 466 adding manifests
466 467 adding file changes
467 468 added 1 changesets with 1 changes to 1 files
468 469 adding remote bookmark quux
469 470 (run 'hg update' to get a working copy)
470 471
471 472 make sure --traceback works
472 473
473 474 $ echo '[hooks]' > .hg/hgrc
474 475 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
475 476
476 477 $ echo aa > a
477 478 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
478 479 Traceback (most recent call last):
479 480
480 481 $ cd ..
481 482 $ hg init c
482 483 $ cd c
483 484
484 485 $ cat > hookext.py <<EOF
485 486 > def autohook(**args):
486 487 > print "Automatically installed hook"
487 488 >
488 489 > def reposetup(ui, repo):
489 490 > repo.ui.setconfig("hooks", "commit.auto", autohook)
490 491 > EOF
491 492 $ echo '[extensions]' >> .hg/hgrc
492 493 $ echo 'hookext = hookext.py' >> .hg/hgrc
493 494
494 495 $ touch foo
495 496 $ hg add foo
496 497 $ hg ci -d '0 0' -m 'add foo'
497 498 Automatically installed hook
498 499 $ echo >> foo
499 500 $ hg ci --debug -d '0 0' -m 'change foo'
500 501 foo
501 502 calling hook commit.auto: hgext_hookext.autohook
502 503 Automatically installed hook
503 504 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
504 505
505 506 $ hg showconfig hooks
506 507 hooks.commit.auto=<function autohook at *> (glob)
507 508
508 509 test python hook configured with python:[file]:[hook] syntax
509 510
510 511 $ cd ..
511 512 $ mkdir d
512 513 $ cd d
513 514 $ hg init repo
514 515 $ mkdir hooks
515 516
516 517 $ cd hooks
517 518 $ cat > testhooks.py <<EOF
518 519 > def testhook(**args):
519 520 > print 'hook works'
520 521 > EOF
521 522 $ echo '[hooks]' > ../repo/.hg/hgrc
522 523 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
523 524
524 525 $ cd ../repo
525 526 $ hg commit -d '0 0'
526 527 hook works
527 528 nothing changed
528 529 [1]
529 530
530 531 $ echo '[hooks]' > .hg/hgrc
531 532 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
532 533 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
533 534
534 535 $ hg up null
535 536 loading update.ne hook failed:
536 537 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
537 538 [255]
538 539
539 540 $ hg id
540 541 loading pre-identify.npmd hook failed:
541 542 abort: No module named repo!
542 543 [255]
543 544
544 545 $ cd ../../b
545 546
546 547 make sure --traceback works on hook import failure
547 548
548 549 $ cat > importfail.py <<EOF
549 550 > import somebogusmodule
550 551 > # dereference something in the module to force demandimport to load it
551 552 > somebogusmodule.whatever
552 553 > EOF
553 554
554 555 $ echo '[hooks]' > .hg/hgrc
555 556 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
556 557
557 558 $ echo a >> a
558 559 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
559 560 exception from first failed import attempt:
560 561 Traceback (most recent call last):
561 562 ImportError: No module named somebogusmodule
562 563 exception from second failed import attempt:
563 564 Traceback (most recent call last):
564 565 ImportError: No module named hgext_importfail
565 566 Traceback (most recent call last):
566 567 Abort: precommit.importfail hook is invalid (import of "importfail" failed)
567 568 abort: precommit.importfail hook is invalid (import of "importfail" failed)
568 569
569 570 Issue1827: Hooks Update & Commit not completely post operation
570 571
571 572 commit and update hooks should run after command completion
572 573
573 574 $ echo '[hooks]' > .hg/hgrc
574 575 $ echo 'commit = hg id' >> .hg/hgrc
575 576 $ echo 'update = hg id' >> .hg/hgrc
576 577 $ echo bb > a
577 578 $ hg ci -ma
578 579 223eafe2750c tip
579 580 $ hg up 0
580 581 cb9a9f314b8b
581 582 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
582 583
583 584 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
584 585 that is passed to pre/post hooks
585 586
586 587 $ echo '[hooks]' > .hg/hgrc
587 588 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
588 589 $ hg id
589 590 cb9a9f314b8b
590 591 $ hg id --verbose
591 592 calling hook pre-identify: hooktests.verbosehook
592 593 verbose output from hook
593 594 cb9a9f314b8b
594 595
595 596 Ensure hooks can be prioritized
596 597
597 598 $ echo '[hooks]' > .hg/hgrc
598 599 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
599 600 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
600 601 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
601 602 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
602 603 $ hg id --verbose
603 604 calling hook pre-identify.b: hooktests.verbosehook
604 605 verbose output from hook
605 606 calling hook pre-identify.a: hooktests.verbosehook
606 607 verbose output from hook
607 608 calling hook pre-identify.c: hooktests.verbosehook
608 609 verbose output from hook
609 610 cb9a9f314b8b
610 611
611 612 new tags must be visible in pretxncommit (issue3210)
612 613
613 614 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
614 615 $ hg tag -f foo
615 616 ['a', 'foo', 'tip']
616 617
617 618 new commits must be visible in pretxnchangegroup (issue3428)
618 619
619 620 $ cd ..
620 621 $ hg init to
621 622 $ echo '[hooks]' >> to/.hg/hgrc
622 623 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
623 624 $ echo a >> to/a
624 625 $ hg --cwd to ci -Ama
625 626 adding a
626 627 $ hg clone to from
627 628 updating to branch default
628 629 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
629 630 $ echo aa >> from/a
630 631 $ hg --cwd from ci -mb
631 632 $ hg --cwd from push
632 633 pushing to $TESTTMP/to (glob)
633 634 searching for changes
634 635 adding changesets
635 636 adding manifests
636 637 adding file changes
637 638 added 1 changesets with 1 changes to 1 files
638 639 changeset: 1:9836a07b9b9d
639 640 tag: tip
640 641 user: test
641 642 date: Thu Jan 01 00:00:00 1970 +0000
642 643 summary: b
643 644
@@ -1,471 +1,471 b''
1 1
2 2
3 3 This test tries to exercise the ssh functionality with a dummy script
4 4
5 5 creating 'remote' repo
6 6
7 7 $ hg init remote
8 8 $ cd remote
9 9 $ echo this > foo
10 10 $ echo this > fooO
11 11 $ hg ci -A -m "init" foo fooO
12 12
13 13 insert a closed branch (issue4428)
14 14
15 15 $ hg up null
16 16 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
17 17 $ hg branch closed
18 18 marked working directory as branch closed
19 19 (branches are permanent and global, did you want a bookmark?)
20 20 $ hg ci -mc0
21 21 $ hg ci --close-branch -mc1
22 22 $ hg up -q default
23 23
24 24 configure for serving
25 25
26 26 $ cat <<EOF > .hg/hgrc
27 27 > [server]
28 28 > uncompressed = True
29 29 >
30 30 > [hooks]
31 31 > changegroup = python "$TESTDIR/printenv.py" changegroup-in-remote 0 ../dummylog
32 32 > EOF
33 33 $ cd ..
34 34
35 35 repo not found error
36 36
37 37 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
38 38 remote: abort: there is no Mercurial repository here (.hg not found)!
39 39 abort: no suitable response from remote hg!
40 40 [255]
41 41
42 42 non-existent absolute path
43 43
44 44 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
45 45 remote: abort: there is no Mercurial repository here (.hg not found)!
46 46 abort: no suitable response from remote hg!
47 47 [255]
48 48
49 49 clone remote via stream
50 50
51 51 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
52 52 streaming all changes
53 53 4 files to transfer, 615 bytes of data
54 54 transferred 615 bytes in * seconds (*) (glob)
55 55 searching for changes
56 56 no changes found
57 57 updating to branch default
58 58 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
59 59 $ cd local-stream
60 60 $ hg verify
61 61 checking changesets
62 62 checking manifests
63 63 crosschecking files in changesets and manifests
64 64 checking files
65 65 2 files, 3 changesets, 2 total revisions
66 66 $ hg branches
67 67 default 0:1160648e36ce
68 68 $ cd ..
69 69
70 70 clone bookmarks via stream
71 71
72 72 $ hg -R local-stream book mybook
73 73 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
74 74 streaming all changes
75 75 4 files to transfer, 615 bytes of data
76 76 transferred 615 bytes in * seconds (*) (glob)
77 77 searching for changes
78 78 no changes found
79 79 updating to branch default
80 80 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
81 81 $ cd stream2
82 82 $ hg book
83 83 mybook 0:1160648e36ce
84 84 $ cd ..
85 85 $ rm -rf local-stream stream2
86 86
87 87 clone remote via pull
88 88
89 89 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
90 90 requesting all changes
91 91 adding changesets
92 92 adding manifests
93 93 adding file changes
94 94 added 3 changesets with 2 changes to 2 files
95 95 updating to branch default
96 96 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
97 97
98 98 verify
99 99
100 100 $ cd local
101 101 $ hg verify
102 102 checking changesets
103 103 checking manifests
104 104 crosschecking files in changesets and manifests
105 105 checking files
106 106 2 files, 3 changesets, 2 total revisions
107 107 $ echo '[hooks]' >> .hg/hgrc
108 108 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup-in-local 0 ../dummylog" >> .hg/hgrc
109 109
110 110 empty default pull
111 111
112 112 $ hg paths
113 113 default = ssh://user@dummy/remote
114 114 $ hg pull -e "python \"$TESTDIR/dummyssh\""
115 115 pulling from ssh://user@dummy/remote
116 116 searching for changes
117 117 no changes found
118 118
119 119 local change
120 120
121 121 $ echo bleah > foo
122 122 $ hg ci -m "add"
123 123
124 124 updating rc
125 125
126 126 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
127 127 $ echo "[ui]" >> .hg/hgrc
128 128 $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
129 129
130 130 find outgoing
131 131
132 132 $ hg out ssh://user@dummy/remote
133 133 comparing with ssh://user@dummy/remote
134 134 searching for changes
135 135 changeset: 3:a28a9d1a809c
136 136 tag: tip
137 137 parent: 0:1160648e36ce
138 138 user: test
139 139 date: Thu Jan 01 00:00:00 1970 +0000
140 140 summary: add
141 141
142 142
143 143 find incoming on the remote side
144 144
145 145 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
146 146 comparing with ssh://user@dummy/local
147 147 searching for changes
148 148 changeset: 3:a28a9d1a809c
149 149 tag: tip
150 150 parent: 0:1160648e36ce
151 151 user: test
152 152 date: Thu Jan 01 00:00:00 1970 +0000
153 153 summary: add
154 154
155 155
156 156 find incoming on the remote side (using absolute path)
157 157
158 158 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
159 159 comparing with ssh://user@dummy/$TESTTMP/local
160 160 searching for changes
161 161 changeset: 3:a28a9d1a809c
162 162 tag: tip
163 163 parent: 0:1160648e36ce
164 164 user: test
165 165 date: Thu Jan 01 00:00:00 1970 +0000
166 166 summary: add
167 167
168 168
169 169 push
170 170
171 171 $ hg push
172 172 pushing to ssh://user@dummy/remote
173 173 searching for changes
174 174 remote: adding changesets
175 175 remote: adding manifests
176 176 remote: adding file changes
177 177 remote: added 1 changesets with 1 changes to 1 files
178 178 $ cd ../remote
179 179
180 180 check remote tip
181 181
182 182 $ hg tip
183 183 changeset: 3:a28a9d1a809c
184 184 tag: tip
185 185 parent: 0:1160648e36ce
186 186 user: test
187 187 date: Thu Jan 01 00:00:00 1970 +0000
188 188 summary: add
189 189
190 190 $ hg verify
191 191 checking changesets
192 192 checking manifests
193 193 crosschecking files in changesets and manifests
194 194 checking files
195 195 2 files, 4 changesets, 3 total revisions
196 196 $ hg cat -r tip foo
197 197 bleah
198 198 $ echo z > z
199 199 $ hg ci -A -m z z
200 200 created new head
201 201
202 202 test pushkeys and bookmarks
203 203
204 204 $ cd ../local
205 205 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
206 206 bookmarks
207 207 namespaces
208 208 phases
209 209 $ hg book foo -r 0
210 210 $ hg out -B
211 211 comparing with ssh://user@dummy/remote
212 212 searching for changed bookmarks
213 213 foo 1160648e36ce
214 214 $ hg push -B foo
215 215 pushing to ssh://user@dummy/remote
216 216 searching for changes
217 217 no changes found
218 218 exporting bookmark foo
219 219 [1]
220 220 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
221 221 foo 1160648e36cec0054048a7edc4110c6f84fde594
222 222 $ hg book -f foo
223 223 $ hg push --traceback
224 224 pushing to ssh://user@dummy/remote
225 225 searching for changes
226 226 no changes found
227 227 updating bookmark foo
228 228 [1]
229 229 $ hg book -d foo
230 230 $ hg in -B
231 231 comparing with ssh://user@dummy/remote
232 232 searching for changed bookmarks
233 233 foo a28a9d1a809c
234 234 $ hg book -f -r 0 foo
235 235 $ hg pull -B foo
236 236 pulling from ssh://user@dummy/remote
237 237 no changes found
238 238 updating bookmark foo
239 239 $ hg book -d foo
240 240 $ hg push -B foo
241 241 pushing to ssh://user@dummy/remote
242 242 searching for changes
243 243 no changes found
244 244 deleting remote bookmark foo
245 245 [1]
246 246
247 247 a bad, evil hook that prints to stdout
248 248
249 249 $ cat <<EOF > $TESTTMP/badhook
250 250 > import sys
251 251 > sys.stdout.write("KABOOM\n")
252 252 > EOF
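For comparison (not part of this test), Mercurial also supports in-process Python hooks, which report through the ui object rather than writing raw bytes to stdout. A minimal sketch, assuming it were registered as "changegroup = python:goodhook.report" (the module and function names here are illustrative):

    # goodhook.py - illustrative in-process changegroup hook.
    # Mercurial calls Python hooks with keyword arguments such as
    # ui, repo, hooktype, and hook-specific fields like node.
    def report(ui, repo, hooktype, node=None, **kwargs):
        ui.status("changegroup received starting at %s\n" % node)
        return False  # a falsy return value means the hook succeeded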
253 253
254 254 $ echo '[hooks]' >> ../remote/.hg/hgrc
255 255 $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
256 256 $ echo r > r
257 257 $ hg ci -A -m z r
258 258
259 259 push should succeed even though it has an unexpected response
260 260
261 261 $ hg push
262 262 pushing to ssh://user@dummy/remote
263 263 searching for changes
264 264 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
265 265 remote: adding changesets
266 266 remote: adding manifests
267 267 remote: adding file changes
268 268 remote: added 1 changesets with 1 changes to 1 files
269 269 remote: KABOOM
270 270 $ hg -R ../remote heads
271 271 changeset: 5:1383141674ec
272 272 tag: tip
273 273 parent: 3:a28a9d1a809c
274 274 user: test
275 275 date: Thu Jan 01 00:00:00 1970 +0000
276 276 summary: z
277 277
278 278 changeset: 4:6c0482d977a3
279 279 parent: 0:1160648e36ce
280 280 user: test
281 281 date: Thu Jan 01 00:00:00 1970 +0000
282 282 summary: z
283 283
284 284
285 285 clone bookmarks
286 286
287 287 $ hg -R ../remote bookmark test
288 288 $ hg -R ../remote bookmarks
289 289 * test 4:6c0482d977a3
290 290 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
291 291 requesting all changes
292 292 adding changesets
293 293 adding manifests
294 294 adding file changes
295 295 added 6 changesets with 5 changes to 4 files (+1 heads)
296 296 updating to branch default
297 297 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
298 298 $ hg -R local-bookmarks bookmarks
299 299 test 4:6c0482d977a3
300 300
301 301 passwords in ssh urls are not supported
302 302 (we use a glob here because different Python versions give different
303 303 results here)
304 304
305 305 $ hg push ssh://user:erroneouspwd@dummy/remote
306 306 pushing to ssh://user:*@dummy/remote (glob)
307 307 abort: password in URL not supported!
308 308 [255]
309 309
310 310 $ cd ..
311 311
312 312 hide outer repo
313 313 $ hg init
314 314
315 315 Test remote paths with spaces (issue2983):
316 316
317 317 $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
318 318 $ touch "$TESTTMP/a repo/test"
319 319 $ hg -R 'a repo' commit -A -m "test"
320 320 adding test
321 321 $ hg -R 'a repo' tag tag
322 322 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
323 323 73649e48688a
324 324
325 325 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
326 326 abort: unknown revision 'noNoNO'!
327 327 [255]
328 328
329 329 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
330 330
331 331 $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
332 332 destination directory: a repo
333 333 abort: destination 'a repo' is not empty
334 334 [255]
335 335
336 336 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
337 337 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
338 338 parameters:
339 339
340 340 $ cat > ssh.sh << EOF
341 341 > userhost="\$1"
342 342 > SSH_ORIGINAL_COMMAND="\$2"
343 343 > export SSH_ORIGINAL_COMMAND
344 344 > PYTHONPATH="$PYTHONPATH"
345 345 > export PYTHONPATH
346 346 > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
347 347 > EOF
348 348
349 349 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
350 350 73649e48688a
351 351
352 352 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
353 353 remote: Illegal repository "$TESTTMP/a'repo" (glob)
354 354 abort: no suitable response from remote hg!
355 355 [255]
356 356
357 357 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
358 358 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
359 359 abort: no suitable response from remote hg!
360 360 [255]
361 361
362 362 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh"
363 363 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
364 364 [255]
365 365
366 366 Test hg-ssh in read-only mode:
367 367
368 368 $ cat > ssh.sh << EOF
369 369 > userhost="\$1"
370 370 > SSH_ORIGINAL_COMMAND="\$2"
371 371 > export SSH_ORIGINAL_COMMAND
372 372 > PYTHONPATH="$PYTHONPATH"
373 373 > export PYTHONPATH
374 374 > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
375 375 > EOF
376 376
377 377 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
378 378 requesting all changes
379 379 adding changesets
380 380 adding manifests
381 381 adding file changes
382 382 added 6 changesets with 5 changes to 4 files (+1 heads)
383 383 updating to branch default
384 384 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
385 385
386 386 $ cd read-only-local
387 387 $ echo "baz" > bar
388 388 $ hg ci -A -m "unpushable commit" bar
389 389 $ hg push --ssh "sh ../ssh.sh"
390 390 pushing to ssh://user@dummy/*/remote (glob)
391 391 searching for changes
392 392 remote: Permission denied
393 393 remote: abort: prechangegroup.hg-ssh hook failed
394 394 remote: Permission denied
395 remote: abort: prepushkey.hg-ssh hook failed
396 abort: unexpected response: empty string
397 [255]
395 remote: pushkey-abort: prepushkey.hg-ssh hook failed
396 updating 6c0482d977a3 to public failed!
397 [1]
398 398
399 399 $ cd ..
400 400
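The changed output above is the graceful behavior from issue4455: when the remote's prepushkey hook refuses the phase update, the server now reports a per-key "pushkey-abort" warning and the push exits with status 1, instead of the old hard abort on an empty response. hg-ssh's read-only mode relies on reject hooks of roughly this shape; a minimal sketch, assuming it is wired up with something like --config hooks.prepushkey.hg-ssh=python:rejectmod.rejectpush (the module name is illustrative):

    # rejectmod.py - illustrative hook that denies any write attempt.
    # For Python hooks a truthy return value marks the hook as failed;
    # with the fix above, a failed prepushkey hook surfaces as a
    # pushkey-abort message rather than aborting the whole push.
    def rejectpush(ui, **kwargs):
        ui.warn("Permission denied\n")
        return True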
401 401 stderr from remote commands should be printed before stdout from local code (issue4336)
402 402
403 403 $ hg clone remote stderr-ordering
404 404 updating to branch default
405 405 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
406 406 $ cd stderr-ordering
407 407 $ cat >> localwrite.py << EOF
408 408 > from mercurial import exchange, extensions
409 409 >
410 410 > def wrappedpush(orig, repo, *args, **kwargs):
411 411 > res = orig(repo, *args, **kwargs)
412 412 > repo.ui.write('local stdout\n')
413 413 > return res
414 414 >
415 415 > def extsetup(ui):
416 416 > extensions.wrapfunction(exchange, 'push', wrappedpush)
417 417 > EOF
418 418
419 419 $ cat >> .hg/hgrc << EOF
420 420 > [paths]
421 421 > default-push = ssh://user@dummy/remote
422 422 > [ui]
423 423 > ssh = python "$TESTDIR/dummyssh"
424 424 > [extensions]
425 425 > localwrite = localwrite.py
426 426 > EOF
427 427
428 428 $ echo localwrite > foo
429 429 $ hg commit -m 'testing localwrite'
430 430 $ hg push
431 431 pushing to ssh://user@dummy/remote
432 432 searching for changes
433 433 remote: adding changesets
434 434 remote: adding manifests
435 435 remote: adding file changes
436 436 remote: added 1 changesets with 1 changes to 1 files
437 437 remote: KABOOM
438 438 local stdout
439 439
440 440 $ cd ..
441 441
442 442 $ cat dummylog
443 443 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
444 444 Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
445 445 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
446 446 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
447 447 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
448 448 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
449 449 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
450 450 Got arguments 1:user@dummy 2:hg -R local serve --stdio
451 451 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
452 452 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
453 453 changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
454 454 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
455 455 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
456 456 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
457 457 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
458 458 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
459 459 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
460 460 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
461 461 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
462 462 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
463 463 changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
464 464 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
465 465 Got arguments 1:user@dummy 2:hg init 'a repo'
466 466 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
467 467 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
468 468 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
469 469 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
470 470 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
471 471 changegroup-in-remote hook: HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1