##// END OF EJS Templates
localrepo: check HG_PENDING strictly...
FUJIWARA Katsunori -
r31054:59e69ed8 default
parent child Browse files
Show More
@@ -1,2048 +1,2047 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
from __future__ import absolute_import

import errno
import hashlib
import inspect
import os
import random
import time
import weakref

from .i18n import _
from .node import (
    hex,
    nullid,
    short,
    wdirrev,
)
from . import (
    bookmarks,
    branchmap,
    bundle2,
    changegroup,
    changelog,
    context,
    dirstate,
    dirstateguard,
    encoding,
    error,
    exchange,
    extensions,
    filelog,
    hook,
    lock as lockmod,
    manifest,
    match as matchmod,
    merge as mergemod,
    mergeutil,
    namespaces,
    obsolete,
    pathutil,
    peer,
    phases,
    pushkey,
    repoview,
    revset,
    revsetlang,
    scmutil,
    store,
    subrepo,
    tags as tagsmod,
    transaction,
    txnutil,
    util,
)
61
62
62 release = lockmod.release
63 release = lockmod.release
63 urlerr = util.urlerr
64 urlerr = util.urlerr
64 urlreq = util.urlreq
65 urlreq = util.urlreq
65
66
66 class repofilecache(scmutil.filecache):
67 class repofilecache(scmutil.filecache):
67 """All filecache usage on repo are done for logic that should be unfiltered
68 """All filecache usage on repo are done for logic that should be unfiltered
68 """
69 """
69
70
70 def __get__(self, repo, type=None):
71 def __get__(self, repo, type=None):
71 if repo is None:
72 if repo is None:
72 return self
73 return self
73 return super(repofilecache, self).__get__(repo.unfiltered(), type)
74 return super(repofilecache, self).__get__(repo.unfiltered(), type)
74 def __set__(self, repo, value):
75 def __set__(self, repo, value):
75 return super(repofilecache, self).__set__(repo.unfiltered(), value)
76 return super(repofilecache, self).__set__(repo.unfiltered(), value)
76 def __delete__(self, repo):
77 def __delete__(self, repo):
77 return super(repofilecache, self).__delete__(repo.unfiltered())
78 return super(repofilecache, self).__delete__(repo.unfiltered())
78
79
79 class storecache(repofilecache):
80 class storecache(repofilecache):
80 """filecache for files in the store"""
81 """filecache for files in the store"""
81 def join(self, obj, fname):
82 def join(self, obj, fname):
82 return obj.sjoin(fname)
83 return obj.sjoin(fname)
83
84
84 class unfilteredpropertycache(util.propertycache):
85 class unfilteredpropertycache(util.propertycache):
85 """propertycache that apply to unfiltered repo only"""
86 """propertycache that apply to unfiltered repo only"""
86
87
87 def __get__(self, repo, type=None):
88 def __get__(self, repo, type=None):
88 unfi = repo.unfiltered()
89 unfi = repo.unfiltered()
89 if unfi is repo:
90 if unfi is repo:
90 return super(unfilteredpropertycache, self).__get__(unfi)
91 return super(unfilteredpropertycache, self).__get__(unfi)
91 return getattr(unfi, self.name)
92 return getattr(unfi, self.name)
92
93
93 class filteredpropertycache(util.propertycache):
94 class filteredpropertycache(util.propertycache):
94 """propertycache that must take filtering in account"""
95 """propertycache that must take filtering in account"""
95
96
96 def cachevalue(self, obj, value):
97 def cachevalue(self, obj, value):
97 object.__setattr__(obj, self.name, value)
98 object.__setattr__(obj, self.name, value)
98
99
99
100
100 def hasunfilteredcache(repo, name):
101 def hasunfilteredcache(repo, name):
101 """check if a repo has an unfilteredpropertycache value for <name>"""
102 """check if a repo has an unfilteredpropertycache value for <name>"""
102 return name in vars(repo.unfiltered())
103 return name in vars(repo.unfiltered())
103
104
104 def unfilteredmethod(orig):
105 def unfilteredmethod(orig):
105 """decorate method that always need to be run on unfiltered version"""
106 """decorate method that always need to be run on unfiltered version"""
106 def wrapper(repo, *args, **kwargs):
107 def wrapper(repo, *args, **kwargs):
107 return orig(repo.unfiltered(), *args, **kwargs)
108 return orig(repo.unfiltered(), *args, **kwargs)
108 return wrapper
109 return wrapper
109
110
110 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
111 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
111 'unbundle'))
112 'unbundle'))
112 legacycaps = moderncaps.union(set(['changegroupsubset']))
113 legacycaps = moderncaps.union(set(['changegroupsubset']))
113
114
114 class localpeer(peer.peerrepository):
115 class localpeer(peer.peerrepository):
115 '''peer for a local repo; reflects only the most recent API'''
116 '''peer for a local repo; reflects only the most recent API'''
116
117
117 def __init__(self, repo, caps=moderncaps):
118 def __init__(self, repo, caps=moderncaps):
118 peer.peerrepository.__init__(self)
119 peer.peerrepository.__init__(self)
119 self._repo = repo.filtered('served')
120 self._repo = repo.filtered('served')
120 self.ui = repo.ui
121 self.ui = repo.ui
121 self._caps = repo._restrictcapabilities(caps)
122 self._caps = repo._restrictcapabilities(caps)
122 self.requirements = repo.requirements
123 self.requirements = repo.requirements
123 self.supportedformats = repo.supportedformats
124 self.supportedformats = repo.supportedformats
124
125
125 def close(self):
126 def close(self):
126 self._repo.close()
127 self._repo.close()
127
128
128 def _capabilities(self):
129 def _capabilities(self):
129 return self._caps
130 return self._caps
130
131
131 def local(self):
132 def local(self):
132 return self._repo
133 return self._repo
133
134
134 def canpush(self):
135 def canpush(self):
135 return True
136 return True
136
137
137 def url(self):
138 def url(self):
138 return self._repo.url()
139 return self._repo.url()
139
140
140 def lookup(self, key):
141 def lookup(self, key):
141 return self._repo.lookup(key)
142 return self._repo.lookup(key)
142
143
143 def branchmap(self):
144 def branchmap(self):
144 return self._repo.branchmap()
145 return self._repo.branchmap()
145
146
146 def heads(self):
147 def heads(self):
147 return self._repo.heads()
148 return self._repo.heads()
148
149
149 def known(self, nodes):
150 def known(self, nodes):
150 return self._repo.known(nodes)
151 return self._repo.known(nodes)
151
152
152 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
153 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
153 **kwargs):
154 **kwargs):
154 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
155 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
155 common=common, bundlecaps=bundlecaps,
156 common=common, bundlecaps=bundlecaps,
156 **kwargs)
157 **kwargs)
157 cb = util.chunkbuffer(chunks)
158 cb = util.chunkbuffer(chunks)
158
159
159 if bundlecaps is not None and 'HG20' in bundlecaps:
160 if bundlecaps is not None and 'HG20' in bundlecaps:
160 # When requesting a bundle2, getbundle returns a stream to make the
161 # When requesting a bundle2, getbundle returns a stream to make the
161 # wire level function happier. We need to build a proper object
162 # wire level function happier. We need to build a proper object
162 # from it in local peer.
163 # from it in local peer.
163 return bundle2.getunbundler(self.ui, cb)
164 return bundle2.getunbundler(self.ui, cb)
164 else:
165 else:
165 return changegroup.getunbundler('01', cb, None)
166 return changegroup.getunbundler('01', cb, None)
166
167
167 # TODO We might want to move the next two calls into legacypeer and add
168 # TODO We might want to move the next two calls into legacypeer and add
168 # unbundle instead.
169 # unbundle instead.
169
170
170 def unbundle(self, cg, heads, url):
171 def unbundle(self, cg, heads, url):
171 """apply a bundle on a repo
172 """apply a bundle on a repo
172
173
173 This function handles the repo locking itself."""
174 This function handles the repo locking itself."""
174 try:
175 try:
175 try:
176 try:
176 cg = exchange.readbundle(self.ui, cg, None)
177 cg = exchange.readbundle(self.ui, cg, None)
177 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
178 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
178 if util.safehasattr(ret, 'getchunks'):
179 if util.safehasattr(ret, 'getchunks'):
179 # This is a bundle20 object, turn it into an unbundler.
180 # This is a bundle20 object, turn it into an unbundler.
180 # This little dance should be dropped eventually when the
181 # This little dance should be dropped eventually when the
181 # API is finally improved.
182 # API is finally improved.
182 stream = util.chunkbuffer(ret.getchunks())
183 stream = util.chunkbuffer(ret.getchunks())
183 ret = bundle2.getunbundler(self.ui, stream)
184 ret = bundle2.getunbundler(self.ui, stream)
184 return ret
185 return ret
185 except Exception as exc:
186 except Exception as exc:
186 # If the exception contains output salvaged from a bundle2
187 # If the exception contains output salvaged from a bundle2
187 # reply, we need to make sure it is printed before continuing
188 # reply, we need to make sure it is printed before continuing
188 # to fail. So we build a bundle2 with such output and consume
189 # to fail. So we build a bundle2 with such output and consume
189 # it directly.
190 # it directly.
190 #
191 #
191 # This is not very elegant but allows a "simple" solution for
192 # This is not very elegant but allows a "simple" solution for
192 # issue4594
193 # issue4594
193 output = getattr(exc, '_bundle2salvagedoutput', ())
194 output = getattr(exc, '_bundle2salvagedoutput', ())
194 if output:
195 if output:
195 bundler = bundle2.bundle20(self._repo.ui)
196 bundler = bundle2.bundle20(self._repo.ui)
196 for out in output:
197 for out in output:
197 bundler.addpart(out)
198 bundler.addpart(out)
198 stream = util.chunkbuffer(bundler.getchunks())
199 stream = util.chunkbuffer(bundler.getchunks())
199 b = bundle2.getunbundler(self.ui, stream)
200 b = bundle2.getunbundler(self.ui, stream)
200 bundle2.processbundle(self._repo, b)
201 bundle2.processbundle(self._repo, b)
201 raise
202 raise
202 except error.PushRaced as exc:
203 except error.PushRaced as exc:
203 raise error.ResponseError(_('push failed:'), str(exc))
204 raise error.ResponseError(_('push failed:'), str(exc))
204
205
205 def lock(self):
206 def lock(self):
206 return self._repo.lock()
207 return self._repo.lock()
207
208
208 def addchangegroup(self, cg, source, url):
209 def addchangegroup(self, cg, source, url):
209 return cg.apply(self._repo, source, url)
210 return cg.apply(self._repo, source, url)
210
211
211 def pushkey(self, namespace, key, old, new):
212 def pushkey(self, namespace, key, old, new):
212 return self._repo.pushkey(namespace, key, old, new)
213 return self._repo.pushkey(namespace, key, old, new)
213
214
214 def listkeys(self, namespace):
215 def listkeys(self, namespace):
215 return self._repo.listkeys(namespace)
216 return self._repo.listkeys(namespace)
216
217
217 def debugwireargs(self, one, two, three=None, four=None, five=None):
218 def debugwireargs(self, one, two, three=None, four=None, five=None):
218 '''used to test argument passing over the wire'''
219 '''used to test argument passing over the wire'''
219 return "%s %s %s %s %s" % (one, two, three, four, five)
220 return "%s %s %s %s %s" % (one, two, three, four, five)
220
221
221 class locallegacypeer(localpeer):
222 class locallegacypeer(localpeer):
222 '''peer extension which implements legacy methods too; used for tests with
223 '''peer extension which implements legacy methods too; used for tests with
223 restricted capabilities'''
224 restricted capabilities'''
224
225
225 def __init__(self, repo):
226 def __init__(self, repo):
226 localpeer.__init__(self, repo, caps=legacycaps)
227 localpeer.__init__(self, repo, caps=legacycaps)
227
228
228 def branches(self, nodes):
229 def branches(self, nodes):
229 return self._repo.branches(nodes)
230 return self._repo.branches(nodes)
230
231
231 def between(self, pairs):
232 def between(self, pairs):
232 return self._repo.between(pairs)
233 return self._repo.between(pairs)
233
234
234 def changegroup(self, basenodes, source):
235 def changegroup(self, basenodes, source):
235 return changegroup.changegroup(self._repo, basenodes, source)
236 return changegroup.changegroup(self._repo, basenodes, source)
236
237
237 def changegroupsubset(self, bases, heads, source):
238 def changegroupsubset(self, bases, heads, source):
238 return changegroup.changegroupsubset(self._repo, bases, heads, source)
239 return changegroup.changegroupsubset(self._repo, bases, heads, source)
239
240
240 class localrepository(object):
241 class localrepository(object):
241
242
242 supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
243 supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
243 'manifestv2'))
244 'manifestv2'))
244 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
245 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
245 'dotencode'))
246 'dotencode'))
246 openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
247 openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
247 filtername = None
248 filtername = None
248
249
249 # a list of (ui, featureset) functions.
250 # a list of (ui, featureset) functions.
250 # only functions defined in module of enabled extensions are invoked
251 # only functions defined in module of enabled extensions are invoked
251 featuresetupfuncs = set()
252 featuresetupfuncs = set()
252
253
253 def __init__(self, baseui, path, create=False):
254 def __init__(self, baseui, path, create=False):
254 self.requirements = set()
255 self.requirements = set()
255 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
256 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
256 self.wopener = self.wvfs
257 self.wopener = self.wvfs
257 self.root = self.wvfs.base
258 self.root = self.wvfs.base
258 self.path = self.wvfs.join(".hg")
259 self.path = self.wvfs.join(".hg")
259 self.origroot = path
260 self.origroot = path
260 self.auditor = pathutil.pathauditor(self.root, self._checknested)
261 self.auditor = pathutil.pathauditor(self.root, self._checknested)
261 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
262 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
262 realfs=False)
263 realfs=False)
263 self.vfs = scmutil.vfs(self.path)
264 self.vfs = scmutil.vfs(self.path)
264 self.opener = self.vfs
265 self.opener = self.vfs
265 self.baseui = baseui
266 self.baseui = baseui
266 self.ui = baseui.copy()
267 self.ui = baseui.copy()
267 self.ui.copy = baseui.copy # prevent copying repo configuration
268 self.ui.copy = baseui.copy # prevent copying repo configuration
268 # A list of callback to shape the phase if no data were found.
269 # A list of callback to shape the phase if no data were found.
269 # Callback are in the form: func(repo, roots) --> processed root.
270 # Callback are in the form: func(repo, roots) --> processed root.
270 # This list it to be filled by extension during repo setup
271 # This list it to be filled by extension during repo setup
271 self._phasedefaults = []
272 self._phasedefaults = []
272 try:
273 try:
273 self.ui.readconfig(self.join("hgrc"), self.root)
274 self.ui.readconfig(self.join("hgrc"), self.root)
274 self._loadextensions()
275 self._loadextensions()
275 except IOError:
276 except IOError:
276 pass
277 pass
277
278
278 if self.featuresetupfuncs:
279 if self.featuresetupfuncs:
279 self.supported = set(self._basesupported) # use private copy
280 self.supported = set(self._basesupported) # use private copy
280 extmods = set(m.__name__ for n, m
281 extmods = set(m.__name__ for n, m
281 in extensions.extensions(self.ui))
282 in extensions.extensions(self.ui))
282 for setupfunc in self.featuresetupfuncs:
283 for setupfunc in self.featuresetupfuncs:
283 if setupfunc.__module__ in extmods:
284 if setupfunc.__module__ in extmods:
284 setupfunc(self.ui, self.supported)
285 setupfunc(self.ui, self.supported)
285 else:
286 else:
286 self.supported = self._basesupported
287 self.supported = self._basesupported
287
288
288 # Add compression engines.
289 # Add compression engines.
289 for name in util.compengines:
290 for name in util.compengines:
290 engine = util.compengines[name]
291 engine = util.compengines[name]
291 if engine.revlogheader():
292 if engine.revlogheader():
292 self.supported.add('exp-compression-%s' % name)
293 self.supported.add('exp-compression-%s' % name)
293
294
294 if not self.vfs.isdir():
295 if not self.vfs.isdir():
295 if create:
296 if create:
296 self.requirements = newreporequirements(self)
297 self.requirements = newreporequirements(self)
297
298
298 if not self.wvfs.exists():
299 if not self.wvfs.exists():
299 self.wvfs.makedirs()
300 self.wvfs.makedirs()
300 self.vfs.makedir(notindexed=True)
301 self.vfs.makedir(notindexed=True)
301
302
302 if 'store' in self.requirements:
303 if 'store' in self.requirements:
303 self.vfs.mkdir("store")
304 self.vfs.mkdir("store")
304
305
305 # create an invalid changelog
306 # create an invalid changelog
306 self.vfs.append(
307 self.vfs.append(
307 "00changelog.i",
308 "00changelog.i",
308 '\0\0\0\2' # represents revlogv2
309 '\0\0\0\2' # represents revlogv2
309 ' dummy changelog to prevent using the old repo layout'
310 ' dummy changelog to prevent using the old repo layout'
310 )
311 )
311 else:
312 else:
312 raise error.RepoError(_("repository %s not found") % path)
313 raise error.RepoError(_("repository %s not found") % path)
313 elif create:
314 elif create:
314 raise error.RepoError(_("repository %s already exists") % path)
315 raise error.RepoError(_("repository %s already exists") % path)
315 else:
316 else:
316 try:
317 try:
317 self.requirements = scmutil.readrequires(
318 self.requirements = scmutil.readrequires(
318 self.vfs, self.supported)
319 self.vfs, self.supported)
319 except IOError as inst:
320 except IOError as inst:
320 if inst.errno != errno.ENOENT:
321 if inst.errno != errno.ENOENT:
321 raise
322 raise
322
323
323 self.sharedpath = self.path
324 self.sharedpath = self.path
324 try:
325 try:
325 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
326 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
326 realpath=True)
327 realpath=True)
327 s = vfs.base
328 s = vfs.base
328 if not vfs.exists():
329 if not vfs.exists():
329 raise error.RepoError(
330 raise error.RepoError(
330 _('.hg/sharedpath points to nonexistent directory %s') % s)
331 _('.hg/sharedpath points to nonexistent directory %s') % s)
331 self.sharedpath = s
332 self.sharedpath = s
332 except IOError as inst:
333 except IOError as inst:
333 if inst.errno != errno.ENOENT:
334 if inst.errno != errno.ENOENT:
334 raise
335 raise
335
336
336 self.store = store.store(
337 self.store = store.store(
337 self.requirements, self.sharedpath, scmutil.vfs)
338 self.requirements, self.sharedpath, scmutil.vfs)
338 self.spath = self.store.path
339 self.spath = self.store.path
339 self.svfs = self.store.vfs
340 self.svfs = self.store.vfs
340 self.sjoin = self.store.join
341 self.sjoin = self.store.join
341 self.vfs.createmode = self.store.createmode
342 self.vfs.createmode = self.store.createmode
342 self._applyopenerreqs()
343 self._applyopenerreqs()
343 if create:
344 if create:
344 self._writerequirements()
345 self._writerequirements()
345
346
346 self._dirstatevalidatewarned = False
347 self._dirstatevalidatewarned = False
347
348
348 self._branchcaches = {}
349 self._branchcaches = {}
349 self._revbranchcache = None
350 self._revbranchcache = None
350 self.filterpats = {}
351 self.filterpats = {}
351 self._datafilters = {}
352 self._datafilters = {}
352 self._transref = self._lockref = self._wlockref = None
353 self._transref = self._lockref = self._wlockref = None
353
354
354 # A cache for various files under .hg/ that tracks file changes,
355 # A cache for various files under .hg/ that tracks file changes,
355 # (used by the filecache decorator)
356 # (used by the filecache decorator)
356 #
357 #
357 # Maps a property name to its util.filecacheentry
358 # Maps a property name to its util.filecacheentry
358 self._filecache = {}
359 self._filecache = {}
359
360
360 # hold sets of revision to be filtered
361 # hold sets of revision to be filtered
361 # should be cleared when something might have changed the filter value:
362 # should be cleared when something might have changed the filter value:
362 # - new changesets,
363 # - new changesets,
363 # - phase change,
364 # - phase change,
364 # - new obsolescence marker,
365 # - new obsolescence marker,
365 # - working directory parent change,
366 # - working directory parent change,
366 # - bookmark changes
367 # - bookmark changes
367 self.filteredrevcache = {}
368 self.filteredrevcache = {}
368
369
369 # generic mapping between names and nodes
370 # generic mapping between names and nodes
370 self.names = namespaces.namespaces()
371 self.names = namespaces.namespaces()
371
372
372 def close(self):
373 def close(self):
373 self._writecaches()
374 self._writecaches()
374
375
375 def _loadextensions(self):
376 def _loadextensions(self):
376 extensions.loadall(self.ui)
377 extensions.loadall(self.ui)
377
378
378 def _writecaches(self):
379 def _writecaches(self):
379 if self._revbranchcache:
380 if self._revbranchcache:
380 self._revbranchcache.write()
381 self._revbranchcache.write()
381
382
382 def _restrictcapabilities(self, caps):
383 def _restrictcapabilities(self, caps):
383 if self.ui.configbool('experimental', 'bundle2-advertise', True):
384 if self.ui.configbool('experimental', 'bundle2-advertise', True):
384 caps = set(caps)
385 caps = set(caps)
385 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
386 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
386 caps.add('bundle2=' + urlreq.quote(capsblob))
387 caps.add('bundle2=' + urlreq.quote(capsblob))
387 return caps
388 return caps
388
389
389 def _applyopenerreqs(self):
390 def _applyopenerreqs(self):
390 self.svfs.options = dict((r, 1) for r in self.requirements
391 self.svfs.options = dict((r, 1) for r in self.requirements
391 if r in self.openerreqs)
392 if r in self.openerreqs)
392 # experimental config: format.chunkcachesize
393 # experimental config: format.chunkcachesize
393 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
394 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
394 if chunkcachesize is not None:
395 if chunkcachesize is not None:
395 self.svfs.options['chunkcachesize'] = chunkcachesize
396 self.svfs.options['chunkcachesize'] = chunkcachesize
396 # experimental config: format.maxchainlen
397 # experimental config: format.maxchainlen
397 maxchainlen = self.ui.configint('format', 'maxchainlen')
398 maxchainlen = self.ui.configint('format', 'maxchainlen')
398 if maxchainlen is not None:
399 if maxchainlen is not None:
399 self.svfs.options['maxchainlen'] = maxchainlen
400 self.svfs.options['maxchainlen'] = maxchainlen
400 # experimental config: format.manifestcachesize
401 # experimental config: format.manifestcachesize
401 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
402 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
402 if manifestcachesize is not None:
403 if manifestcachesize is not None:
403 self.svfs.options['manifestcachesize'] = manifestcachesize
404 self.svfs.options['manifestcachesize'] = manifestcachesize
404 # experimental config: format.aggressivemergedeltas
405 # experimental config: format.aggressivemergedeltas
405 aggressivemergedeltas = self.ui.configbool('format',
406 aggressivemergedeltas = self.ui.configbool('format',
406 'aggressivemergedeltas', False)
407 'aggressivemergedeltas', False)
407 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
408 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
408 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
409 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
409
410
410 for r in self.requirements:
411 for r in self.requirements:
411 if r.startswith('exp-compression-'):
412 if r.startswith('exp-compression-'):
412 self.svfs.options['compengine'] = r[len('exp-compression-'):]
413 self.svfs.options['compengine'] = r[len('exp-compression-'):]
413
414
414 def _writerequirements(self):
415 def _writerequirements(self):
415 scmutil.writerequires(self.vfs, self.requirements)
416 scmutil.writerequires(self.vfs, self.requirements)
416
417
417 def _checknested(self, path):
418 def _checknested(self, path):
418 """Determine if path is a legal nested repository."""
419 """Determine if path is a legal nested repository."""
419 if not path.startswith(self.root):
420 if not path.startswith(self.root):
420 return False
421 return False
421 subpath = path[len(self.root) + 1:]
422 subpath = path[len(self.root) + 1:]
422 normsubpath = util.pconvert(subpath)
423 normsubpath = util.pconvert(subpath)
423
424
424 # XXX: Checking against the current working copy is wrong in
425 # XXX: Checking against the current working copy is wrong in
425 # the sense that it can reject things like
426 # the sense that it can reject things like
426 #
427 #
427 # $ hg cat -r 10 sub/x.txt
428 # $ hg cat -r 10 sub/x.txt
428 #
429 #
429 # if sub/ is no longer a subrepository in the working copy
430 # if sub/ is no longer a subrepository in the working copy
430 # parent revision.
431 # parent revision.
431 #
432 #
432 # However, it can of course also allow things that would have
433 # However, it can of course also allow things that would have
433 # been rejected before, such as the above cat command if sub/
434 # been rejected before, such as the above cat command if sub/
434 # is a subrepository now, but was a normal directory before.
435 # is a subrepository now, but was a normal directory before.
435 # The old path auditor would have rejected by mistake since it
436 # The old path auditor would have rejected by mistake since it
436 # panics when it sees sub/.hg/.
437 # panics when it sees sub/.hg/.
437 #
438 #
438 # All in all, checking against the working copy seems sensible
439 # All in all, checking against the working copy seems sensible
439 # since we want to prevent access to nested repositories on
440 # since we want to prevent access to nested repositories on
440 # the filesystem *now*.
441 # the filesystem *now*.
441 ctx = self[None]
442 ctx = self[None]
442 parts = util.splitpath(subpath)
443 parts = util.splitpath(subpath)
443 while parts:
444 while parts:
444 prefix = '/'.join(parts)
445 prefix = '/'.join(parts)
445 if prefix in ctx.substate:
446 if prefix in ctx.substate:
446 if prefix == normsubpath:
447 if prefix == normsubpath:
447 return True
448 return True
448 else:
449 else:
449 sub = ctx.sub(prefix)
450 sub = ctx.sub(prefix)
450 return sub.checknested(subpath[len(prefix) + 1:])
451 return sub.checknested(subpath[len(prefix) + 1:])
451 else:
452 else:
452 parts.pop()
453 parts.pop()
453 return False
454 return False
454
455
455 def peer(self):
456 def peer(self):
456 return localpeer(self) # not cached to avoid reference cycle
457 return localpeer(self) # not cached to avoid reference cycle
457
458
458 def unfiltered(self):
459 def unfiltered(self):
459 """Return unfiltered version of the repository
460 """Return unfiltered version of the repository
460
461
461 Intended to be overwritten by filtered repo."""
462 Intended to be overwritten by filtered repo."""
462 return self
463 return self
463
464
464 def filtered(self, name):
465 def filtered(self, name):
465 """Return a filtered version of a repository"""
466 """Return a filtered version of a repository"""
466 # build a new class with the mixin and the current class
467 # build a new class with the mixin and the current class
467 # (possibly subclass of the repo)
468 # (possibly subclass of the repo)
468 class proxycls(repoview.repoview, self.unfiltered().__class__):
469 class proxycls(repoview.repoview, self.unfiltered().__class__):
469 pass
470 pass
470 return proxycls(self, name)
471 return proxycls(self, name)
471
472
472 @repofilecache('bookmarks', 'bookmarks.current')
473 @repofilecache('bookmarks', 'bookmarks.current')
473 def _bookmarks(self):
474 def _bookmarks(self):
474 return bookmarks.bmstore(self)
475 return bookmarks.bmstore(self)
475
476
476 @property
477 @property
477 def _activebookmark(self):
478 def _activebookmark(self):
478 return self._bookmarks.active
479 return self._bookmarks.active
479
480
480 def bookmarkheads(self, bookmark):
481 def bookmarkheads(self, bookmark):
481 name = bookmark.split('@', 1)[0]
482 name = bookmark.split('@', 1)[0]
482 heads = []
483 heads = []
483 for mark, n in self._bookmarks.iteritems():
484 for mark, n in self._bookmarks.iteritems():
484 if mark.split('@', 1)[0] == name:
485 if mark.split('@', 1)[0] == name:
485 heads.append(n)
486 heads.append(n)
486 return heads
487 return heads
487
488
488 # _phaserevs and _phasesets depend on changelog. what we need is to
489 # _phaserevs and _phasesets depend on changelog. what we need is to
489 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
490 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
490 # can't be easily expressed in filecache mechanism.
491 # can't be easily expressed in filecache mechanism.
491 @storecache('phaseroots', '00changelog.i')
492 @storecache('phaseroots', '00changelog.i')
492 def _phasecache(self):
493 def _phasecache(self):
493 return phases.phasecache(self, self._phasedefaults)
494 return phases.phasecache(self, self._phasedefaults)
494
495
495 @storecache('obsstore')
496 @storecache('obsstore')
496 def obsstore(self):
497 def obsstore(self):
497 # read default format for new obsstore.
498 # read default format for new obsstore.
498 # developer config: format.obsstore-version
499 # developer config: format.obsstore-version
499 defaultformat = self.ui.configint('format', 'obsstore-version', None)
500 defaultformat = self.ui.configint('format', 'obsstore-version', None)
500 # rely on obsstore class default when possible.
501 # rely on obsstore class default when possible.
501 kwargs = {}
502 kwargs = {}
502 if defaultformat is not None:
503 if defaultformat is not None:
503 kwargs['defaultformat'] = defaultformat
504 kwargs['defaultformat'] = defaultformat
504 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
505 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
505 store = obsolete.obsstore(self.svfs, readonly=readonly,
506 store = obsolete.obsstore(self.svfs, readonly=readonly,
506 **kwargs)
507 **kwargs)
507 if store and readonly:
508 if store and readonly:
508 self.ui.warn(
509 self.ui.warn(
509 _('obsolete feature not enabled but %i markers found!\n')
510 _('obsolete feature not enabled but %i markers found!\n')
510 % len(list(store)))
511 % len(list(store)))
511 return store
512 return store
512
513
513 @storecache('00changelog.i')
514 @storecache('00changelog.i')
514 def changelog(self):
515 def changelog(self):
515 c = changelog.changelog(self.svfs)
516 c = changelog.changelog(self.svfs)
516 if 'HG_PENDING' in encoding.environ:
517 if txnutil.mayhavepending(self.root):
517 p = encoding.environ['HG_PENDING']
518 c.readpending('00changelog.i.a')
518 if p.startswith(self.root):
519 c.readpending('00changelog.i.a')
520 return c
519 return c
521
520
522 def _constructmanifest(self):
521 def _constructmanifest(self):
523 # This is a temporary function while we migrate from manifest to
522 # This is a temporary function while we migrate from manifest to
524 # manifestlog. It allows bundlerepo and unionrepo to intercept the
523 # manifestlog. It allows bundlerepo and unionrepo to intercept the
525 # manifest creation.
524 # manifest creation.
526 return manifest.manifestrevlog(self.svfs)
525 return manifest.manifestrevlog(self.svfs)
527
526
528 @storecache('00manifest.i')
527 @storecache('00manifest.i')
529 def manifestlog(self):
528 def manifestlog(self):
530 return manifest.manifestlog(self.svfs, self)
529 return manifest.manifestlog(self.svfs, self)
531
530
532 @repofilecache('dirstate')
531 @repofilecache('dirstate')
533 def dirstate(self):
532 def dirstate(self):
534 return dirstate.dirstate(self.vfs, self.ui, self.root,
533 return dirstate.dirstate(self.vfs, self.ui, self.root,
535 self._dirstatevalidate)
534 self._dirstatevalidate)
536
535
537 def _dirstatevalidate(self, node):
536 def _dirstatevalidate(self, node):
538 try:
537 try:
539 self.changelog.rev(node)
538 self.changelog.rev(node)
540 return node
539 return node
541 except error.LookupError:
540 except error.LookupError:
542 if not self._dirstatevalidatewarned:
541 if not self._dirstatevalidatewarned:
543 self._dirstatevalidatewarned = True
542 self._dirstatevalidatewarned = True
544 self.ui.warn(_("warning: ignoring unknown"
543 self.ui.warn(_("warning: ignoring unknown"
545 " working parent %s!\n") % short(node))
544 " working parent %s!\n") % short(node))
546 return nullid
545 return nullid
547
546
548 def __getitem__(self, changeid):
547 def __getitem__(self, changeid):
549 if changeid is None or changeid == wdirrev:
548 if changeid is None or changeid == wdirrev:
550 return context.workingctx(self)
549 return context.workingctx(self)
551 if isinstance(changeid, slice):
550 if isinstance(changeid, slice):
552 return [context.changectx(self, i)
551 return [context.changectx(self, i)
553 for i in xrange(*changeid.indices(len(self)))
552 for i in xrange(*changeid.indices(len(self)))
554 if i not in self.changelog.filteredrevs]
553 if i not in self.changelog.filteredrevs]
555 return context.changectx(self, changeid)
554 return context.changectx(self, changeid)
556
555
557 def __contains__(self, changeid):
556 def __contains__(self, changeid):
558 try:
557 try:
559 self[changeid]
558 self[changeid]
560 return True
559 return True
561 except error.RepoLookupError:
560 except error.RepoLookupError:
562 return False
561 return False
563
562
564 def __nonzero__(self):
563 def __nonzero__(self):
565 return True
564 return True
566
565
567 def __len__(self):
566 def __len__(self):
568 return len(self.changelog)
567 return len(self.changelog)
569
568
570 def __iter__(self):
569 def __iter__(self):
571 return iter(self.changelog)
570 return iter(self.changelog)
572
571
573 def revs(self, expr, *args):
572 def revs(self, expr, *args):
574 '''Find revisions matching a revset.
573 '''Find revisions matching a revset.
575
574
576 The revset is specified as a string ``expr`` that may contain
575 The revset is specified as a string ``expr`` that may contain
577 %-formatting to escape certain types. See ``revsetlang.formatspec``.
576 %-formatting to escape certain types. See ``revsetlang.formatspec``.
578
577
579 Revset aliases from the configuration are not expanded. To expand
578 Revset aliases from the configuration are not expanded. To expand
580 user aliases, consider calling ``scmutil.revrange()`` or
579 user aliases, consider calling ``scmutil.revrange()`` or
581 ``repo.anyrevs([expr], user=True)``.
580 ``repo.anyrevs([expr], user=True)``.
582
581
583 Returns a revset.abstractsmartset, which is a list-like interface
582 Returns a revset.abstractsmartset, which is a list-like interface
584 that contains integer revisions.
583 that contains integer revisions.
585 '''
584 '''
586 expr = revsetlang.formatspec(expr, *args)
585 expr = revsetlang.formatspec(expr, *args)
587 m = revset.match(None, expr)
586 m = revset.match(None, expr)
588 return m(self)
587 return m(self)
589
588
590 def set(self, expr, *args):
589 def set(self, expr, *args):
591 '''Find revisions matching a revset and emit changectx instances.
590 '''Find revisions matching a revset and emit changectx instances.
592
591
593 This is a convenience wrapper around ``revs()`` that iterates the
592 This is a convenience wrapper around ``revs()`` that iterates the
594 result and is a generator of changectx instances.
593 result and is a generator of changectx instances.
595
594
596 Revset aliases from the configuration are not expanded. To expand
595 Revset aliases from the configuration are not expanded. To expand
597 user aliases, consider calling ``scmutil.revrange()``.
596 user aliases, consider calling ``scmutil.revrange()``.
598 '''
597 '''
599 for r in self.revs(expr, *args):
598 for r in self.revs(expr, *args):
600 yield self[r]
599 yield self[r]
601
600
602 def anyrevs(self, specs, user=False):
601 def anyrevs(self, specs, user=False):
603 '''Find revisions matching one of the given revsets.
602 '''Find revisions matching one of the given revsets.
604
603
605 Revset aliases from the configuration are not expanded by default. To
604 Revset aliases from the configuration are not expanded by default. To
606 expand user aliases, specify ``user=True``.
605 expand user aliases, specify ``user=True``.
607 '''
606 '''
608 if user:
607 if user:
609 m = revset.matchany(self.ui, specs, repo=self)
608 m = revset.matchany(self.ui, specs, repo=self)
610 else:
609 else:
611 m = revset.matchany(None, specs)
610 m = revset.matchany(None, specs)
612 return m(self)
611 return m(self)
613
612
614 def url(self):
613 def url(self):
615 return 'file:' + self.root
614 return 'file:' + self.root
616
615
617 def hook(self, name, throw=False, **args):
616 def hook(self, name, throw=False, **args):
618 """Call a hook, passing this repo instance.
617 """Call a hook, passing this repo instance.
619
618
620 This a convenience method to aid invoking hooks. Extensions likely
619 This a convenience method to aid invoking hooks. Extensions likely
621 won't call this unless they have registered a custom hook or are
620 won't call this unless they have registered a custom hook or are
622 replacing code that is expected to call a hook.
621 replacing code that is expected to call a hook.
623 """
622 """
624 return hook.hook(self.ui, self, name, throw, **args)
623 return hook.hook(self.ui, self, name, throw, **args)
625
624
626 @unfilteredmethod
625 @unfilteredmethod
627 def _tag(self, names, node, message, local, user, date, extra=None,
626 def _tag(self, names, node, message, local, user, date, extra=None,
628 editor=False):
627 editor=False):
629 if isinstance(names, str):
628 if isinstance(names, str):
630 names = (names,)
629 names = (names,)
631
630
632 branches = self.branchmap()
631 branches = self.branchmap()
633 for name in names:
632 for name in names:
634 self.hook('pretag', throw=True, node=hex(node), tag=name,
633 self.hook('pretag', throw=True, node=hex(node), tag=name,
635 local=local)
634 local=local)
636 if name in branches:
635 if name in branches:
637 self.ui.warn(_("warning: tag %s conflicts with existing"
636 self.ui.warn(_("warning: tag %s conflicts with existing"
638 " branch name\n") % name)
637 " branch name\n") % name)
639
638
640 def writetags(fp, names, munge, prevtags):
639 def writetags(fp, names, munge, prevtags):
641 fp.seek(0, 2)
640 fp.seek(0, 2)
642 if prevtags and prevtags[-1] != '\n':
641 if prevtags and prevtags[-1] != '\n':
643 fp.write('\n')
642 fp.write('\n')
644 for name in names:
643 for name in names:
645 if munge:
644 if munge:
646 m = munge(name)
645 m = munge(name)
647 else:
646 else:
648 m = name
647 m = name
649
648
650 if (self._tagscache.tagtypes and
649 if (self._tagscache.tagtypes and
651 name in self._tagscache.tagtypes):
650 name in self._tagscache.tagtypes):
652 old = self.tags().get(name, nullid)
651 old = self.tags().get(name, nullid)
653 fp.write('%s %s\n' % (hex(old), m))
652 fp.write('%s %s\n' % (hex(old), m))
654 fp.write('%s %s\n' % (hex(node), m))
653 fp.write('%s %s\n' % (hex(node), m))
655 fp.close()
654 fp.close()
656
655
657 prevtags = ''
656 prevtags = ''
658 if local:
657 if local:
659 try:
658 try:
660 fp = self.vfs('localtags', 'r+')
659 fp = self.vfs('localtags', 'r+')
661 except IOError:
660 except IOError:
662 fp = self.vfs('localtags', 'a')
661 fp = self.vfs('localtags', 'a')
663 else:
662 else:
664 prevtags = fp.read()
663 prevtags = fp.read()
665
664
666 # local tags are stored in the current charset
665 # local tags are stored in the current charset
667 writetags(fp, names, None, prevtags)
666 writetags(fp, names, None, prevtags)
668 for name in names:
667 for name in names:
669 self.hook('tag', node=hex(node), tag=name, local=local)
668 self.hook('tag', node=hex(node), tag=name, local=local)
670 return
669 return
671
670
672 try:
671 try:
673 fp = self.wfile('.hgtags', 'rb+')
672 fp = self.wfile('.hgtags', 'rb+')
674 except IOError as e:
673 except IOError as e:
675 if e.errno != errno.ENOENT:
674 if e.errno != errno.ENOENT:
676 raise
675 raise
677 fp = self.wfile('.hgtags', 'ab')
676 fp = self.wfile('.hgtags', 'ab')
678 else:
677 else:
679 prevtags = fp.read()
678 prevtags = fp.read()
680
679
681 # committed tags are stored in UTF-8
680 # committed tags are stored in UTF-8
682 writetags(fp, names, encoding.fromlocal, prevtags)
681 writetags(fp, names, encoding.fromlocal, prevtags)
683
682
684 fp.close()
683 fp.close()
685
684
686 self.invalidatecaches()
685 self.invalidatecaches()
687
686
688 if '.hgtags' not in self.dirstate:
687 if '.hgtags' not in self.dirstate:
689 self[None].add(['.hgtags'])
688 self[None].add(['.hgtags'])
690
689
691 m = matchmod.exact(self.root, '', ['.hgtags'])
690 m = matchmod.exact(self.root, '', ['.hgtags'])
692 tagnode = self.commit(message, user, date, extra=extra, match=m,
691 tagnode = self.commit(message, user, date, extra=extra, match=m,
693 editor=editor)
692 editor=editor)
694
693
695 for name in names:
694 for name in names:
696 self.hook('tag', node=hex(node), tag=name, local=local)
695 self.hook('tag', node=hex(node), tag=name, local=local)
697
696
698 return tagnode
697 return tagnode
699
698
700 def tag(self, names, node, message, local, user, date, editor=False):
699 def tag(self, names, node, message, local, user, date, editor=False):
701 '''tag a revision with one or more symbolic names.
700 '''tag a revision with one or more symbolic names.
702
701
703 names is a list of strings or, when adding a single tag, names may be a
702 names is a list of strings or, when adding a single tag, names may be a
704 string.
703 string.
705
704
706 if local is True, the tags are stored in a per-repository file.
705 if local is True, the tags are stored in a per-repository file.
707 otherwise, they are stored in the .hgtags file, and a new
706 otherwise, they are stored in the .hgtags file, and a new
708 changeset is committed with the change.
707 changeset is committed with the change.
709
708
710 keyword arguments:
709 keyword arguments:
711
710
712 local: whether to store tags in non-version-controlled file
711 local: whether to store tags in non-version-controlled file
713 (default False)
712 (default False)
714
713
715 message: commit message to use if committing
714 message: commit message to use if committing
716
715
717 user: name of user to use if committing
716 user: name of user to use if committing
718
717
719 date: date tuple to use if committing'''
718 date: date tuple to use if committing'''
720
719
721 if not local:
720 if not local:
722 m = matchmod.exact(self.root, '', ['.hgtags'])
721 m = matchmod.exact(self.root, '', ['.hgtags'])
723 if any(self.status(match=m, unknown=True, ignored=True)):
722 if any(self.status(match=m, unknown=True, ignored=True)):
724 raise error.Abort(_('working copy of .hgtags is changed'),
723 raise error.Abort(_('working copy of .hgtags is changed'),
725 hint=_('please commit .hgtags manually'))
724 hint=_('please commit .hgtags manually'))
726
725
727 self.tags() # instantiate the cache
726 self.tags() # instantiate the cache
728 self._tag(names, node, message, local, user, date, editor=editor)
727 self._tag(names, node, message, local, user, date, editor=editor)
729
728
730 @filteredpropertycache
729 @filteredpropertycache
731 def _tagscache(self):
730 def _tagscache(self):
732 '''Returns a tagscache object that contains various tags related
731 '''Returns a tagscache object that contains various tags related
733 caches.'''
732 caches.'''
734
733
735 # This simplifies its cache management by having one decorated
734 # This simplifies its cache management by having one decorated
736 # function (this one) and the rest simply fetch things from it.
735 # function (this one) and the rest simply fetch things from it.
737 class tagscache(object):
736 class tagscache(object):
738 def __init__(self):
737 def __init__(self):
739 # These two define the set of tags for this repository. tags
738 # These two define the set of tags for this repository. tags
740 # maps tag name to node; tagtypes maps tag name to 'global' or
739 # maps tag name to node; tagtypes maps tag name to 'global' or
741 # 'local'. (Global tags are defined by .hgtags across all
740 # 'local'. (Global tags are defined by .hgtags across all
742 # heads, and local tags are defined in .hg/localtags.)
741 # heads, and local tags are defined in .hg/localtags.)
743 # They constitute the in-memory cache of tags.
742 # They constitute the in-memory cache of tags.
744 self.tags = self.tagtypes = None
743 self.tags = self.tagtypes = None
745
744
746 self.nodetagscache = self.tagslist = None
745 self.nodetagscache = self.tagslist = None
747
746
748 cache = tagscache()
747 cache = tagscache()
749 cache.tags, cache.tagtypes = self._findtags()
748 cache.tags, cache.tagtypes = self._findtags()
750
749
751 return cache
750 return cache
752
751
753 def tags(self):
752 def tags(self):
754 '''return a mapping of tag to node'''
753 '''return a mapping of tag to node'''
755 t = {}
754 t = {}
756 if self.changelog.filteredrevs:
755 if self.changelog.filteredrevs:
757 tags, tt = self._findtags()
756 tags, tt = self._findtags()
758 else:
757 else:
759 tags = self._tagscache.tags
758 tags = self._tagscache.tags
760 for k, v in tags.iteritems():
759 for k, v in tags.iteritems():
761 try:
760 try:
762 # ignore tags to unknown nodes
761 # ignore tags to unknown nodes
763 self.changelog.rev(v)
762 self.changelog.rev(v)
764 t[k] = v
763 t[k] = v
765 except (error.LookupError, ValueError):
764 except (error.LookupError, ValueError):
766 pass
765 pass
767 return t
766 return t
768
767
769 def _findtags(self):
768 def _findtags(self):
770 '''Do the hard work of finding tags. Return a pair of dicts
769 '''Do the hard work of finding tags. Return a pair of dicts
771 (tags, tagtypes) where tags maps tag name to node, and tagtypes
770 (tags, tagtypes) where tags maps tag name to node, and tagtypes
772 maps tag name to a string like \'global\' or \'local\'.
771 maps tag name to a string like \'global\' or \'local\'.
773 Subclasses or extensions are free to add their own tags, but
772 Subclasses or extensions are free to add their own tags, but
774 should be aware that the returned dicts will be retained for the
773 should be aware that the returned dicts will be retained for the
775 duration of the localrepo object.'''
774 duration of the localrepo object.'''
776
775
777 # XXX what tagtype should subclasses/extensions use? Currently
776 # XXX what tagtype should subclasses/extensions use? Currently
778 # mq and bookmarks add tags, but do not set the tagtype at all.
777 # mq and bookmarks add tags, but do not set the tagtype at all.
779 # Should each extension invent its own tag type? Should there
778 # Should each extension invent its own tag type? Should there
780 # be one tagtype for all such "virtual" tags? Or is the status
779 # be one tagtype for all such "virtual" tags? Or is the status
781 # quo fine?
780 # quo fine?
782
781
783 alltags = {} # map tag name to (node, hist)
782 alltags = {} # map tag name to (node, hist)
784 tagtypes = {}
783 tagtypes = {}
785
784
786 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
785 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
787 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
786 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
788
787
789 # Build the return dicts. Have to re-encode tag names because
788 # Build the return dicts. Have to re-encode tag names because
790 # the tags module always uses UTF-8 (in order not to lose info
789 # the tags module always uses UTF-8 (in order not to lose info
791 # writing to the cache), but the rest of Mercurial wants them in
790 # writing to the cache), but the rest of Mercurial wants them in
792 # local encoding.
791 # local encoding.
793 tags = {}
792 tags = {}
794 for (name, (node, hist)) in alltags.iteritems():
793 for (name, (node, hist)) in alltags.iteritems():
795 if node != nullid:
794 if node != nullid:
796 tags[encoding.tolocal(name)] = node
795 tags[encoding.tolocal(name)] = node
797 tags['tip'] = self.changelog.tip()
796 tags['tip'] = self.changelog.tip()
798 tagtypes = dict([(encoding.tolocal(name), value)
797 tagtypes = dict([(encoding.tolocal(name), value)
799 for (name, value) in tagtypes.iteritems()])
798 for (name, value) in tagtypes.iteritems()])
800 return (tags, tagtypes)
799 return (tags, tagtypes)
801
800
802 def tagtype(self, tagname):
801 def tagtype(self, tagname):
803 '''
802 '''
804 return the type of the given tag. result can be:
803 return the type of the given tag. result can be:
805
804
806 'local' : a local tag
805 'local' : a local tag
807 'global' : a global tag
806 'global' : a global tag
808 None : tag does not exist
807 None : tag does not exist
809 '''
808 '''
810
809
811 return self._tagscache.tagtypes.get(tagname)
810 return self._tagscache.tagtypes.get(tagname)
812
811
813 def tagslist(self):
812 def tagslist(self):
814 '''return a list of tags ordered by revision'''
813 '''return a list of tags ordered by revision'''
815 if not self._tagscache.tagslist:
814 if not self._tagscache.tagslist:
816 l = []
815 l = []
817 for t, n in self.tags().iteritems():
816 for t, n in self.tags().iteritems():
818 l.append((self.changelog.rev(n), t, n))
817 l.append((self.changelog.rev(n), t, n))
819 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
818 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
820
819
821 return self._tagscache.tagslist
820 return self._tagscache.tagslist
822
821
823 def nodetags(self, node):
822 def nodetags(self, node):
824 '''return the tags associated with a node'''
823 '''return the tags associated with a node'''
825 if not self._tagscache.nodetagscache:
824 if not self._tagscache.nodetagscache:
826 nodetagscache = {}
825 nodetagscache = {}
827 for t, n in self._tagscache.tags.iteritems():
826 for t, n in self._tagscache.tags.iteritems():
828 nodetagscache.setdefault(n, []).append(t)
827 nodetagscache.setdefault(n, []).append(t)
829 for tags in nodetagscache.itervalues():
828 for tags in nodetagscache.itervalues():
830 tags.sort()
829 tags.sort()
831 self._tagscache.nodetagscache = nodetagscache
830 self._tagscache.nodetagscache = nodetagscache
832 return self._tagscache.nodetagscache.get(node, [])
831 return self._tagscache.nodetagscache.get(node, [])
833
832
834 def nodebookmarks(self, node):
833 def nodebookmarks(self, node):
835 """return the list of bookmarks pointing to the specified node"""
834 """return the list of bookmarks pointing to the specified node"""
836 marks = []
835 marks = []
837 for bookmark, n in self._bookmarks.iteritems():
836 for bookmark, n in self._bookmarks.iteritems():
838 if n == node:
837 if n == node:
839 marks.append(bookmark)
838 marks.append(bookmark)
840 return sorted(marks)
839 return sorted(marks)
841
840
842 def branchmap(self):
841 def branchmap(self):
843 '''returns a dictionary {branch: [branchheads]} with branchheads
842 '''returns a dictionary {branch: [branchheads]} with branchheads
844 ordered by increasing revision number'''
843 ordered by increasing revision number'''
845 branchmap.updatecache(self)
844 branchmap.updatecache(self)
846 return self._branchcaches[self.filtername]
845 return self._branchcaches[self.filtername]
847
846
848 @unfilteredmethod
847 @unfilteredmethod
849 def revbranchcache(self):
848 def revbranchcache(self):
850 if not self._revbranchcache:
849 if not self._revbranchcache:
851 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
850 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
852 return self._revbranchcache
851 return self._revbranchcache
853
852
854 def branchtip(self, branch, ignoremissing=False):
853 def branchtip(self, branch, ignoremissing=False):
855 '''return the tip node for a given branch
854 '''return the tip node for a given branch
856
855
857 If ignoremissing is True, then this method will not raise an error.
856 If ignoremissing is True, then this method will not raise an error.
858 This is helpful for callers that only expect None for a missing branch
857 This is helpful for callers that only expect None for a missing branch
859 (e.g. namespace).
858 (e.g. namespace).
860
859
861 '''
860 '''
862 try:
861 try:
863 return self.branchmap().branchtip(branch)
862 return self.branchmap().branchtip(branch)
864 except KeyError:
863 except KeyError:
865 if not ignoremissing:
864 if not ignoremissing:
866 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
865 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
867 else:
866 else:
868 pass
867 pass
869
868
870 def lookup(self, key):
869 def lookup(self, key):
871 return self[key].node()
870 return self[key].node()
872
871
873 def lookupbranch(self, key, remote=None):
872 def lookupbranch(self, key, remote=None):
874 repo = remote or self
873 repo = remote or self
875 if key in repo.branchmap():
874 if key in repo.branchmap():
876 return key
875 return key
877
876
878 repo = (remote and remote.local()) and remote or self
877 repo = (remote and remote.local()) and remote or self
879 return repo[key].branch()
878 return repo[key].branch()
880
879
881 def known(self, nodes):
880 def known(self, nodes):
882 cl = self.changelog
881 cl = self.changelog
883 nm = cl.nodemap
882 nm = cl.nodemap
884 filtered = cl.filteredrevs
883 filtered = cl.filteredrevs
885 result = []
884 result = []
886 for n in nodes:
885 for n in nodes:
887 r = nm.get(n)
886 r = nm.get(n)
888 resp = not (r is None or r in filtered)
887 resp = not (r is None or r in filtered)
889 result.append(resp)
888 result.append(resp)
890 return result
889 return result
891
890
892 def local(self):
891 def local(self):
893 return self
892 return self
894
893
895 def publishing(self):
894 def publishing(self):
896 # it's safe (and desirable) to trust the publish flag unconditionally
895 # it's safe (and desirable) to trust the publish flag unconditionally
897 # so that we don't finalize changes shared between users via ssh or nfs
896 # so that we don't finalize changes shared between users via ssh or nfs
898 return self.ui.configbool('phases', 'publish', True, untrusted=True)
897 return self.ui.configbool('phases', 'publish', True, untrusted=True)
899
898
900 def cancopy(self):
899 def cancopy(self):
901 # so statichttprepo's override of local() works
900 # so statichttprepo's override of local() works
902 if not self.local():
901 if not self.local():
903 return False
902 return False
904 if not self.publishing():
903 if not self.publishing():
905 return True
904 return True
906 # if publishing we can't copy if there is filtered content
905 # if publishing we can't copy if there is filtered content
907 return not self.filtered('visible').changelog.filteredrevs
906 return not self.filtered('visible').changelog.filteredrevs
908
907
909 def shared(self):
908 def shared(self):
910 '''the type of shared repository (None if not shared)'''
909 '''the type of shared repository (None if not shared)'''
911 if self.sharedpath != self.path:
910 if self.sharedpath != self.path:
912 return 'store'
911 return 'store'
913 return None
912 return None
914
913
915 def join(self, f, *insidef):
914 def join(self, f, *insidef):
916 return self.vfs.join(os.path.join(f, *insidef))
915 return self.vfs.join(os.path.join(f, *insidef))
917
916
918 def wjoin(self, f, *insidef):
917 def wjoin(self, f, *insidef):
919 return self.vfs.reljoin(self.root, f, *insidef)
918 return self.vfs.reljoin(self.root, f, *insidef)
920
919
921 def file(self, f):
920 def file(self, f):
922 if f[0] == '/':
921 if f[0] == '/':
923 f = f[1:]
922 f = f[1:]
924 return filelog.filelog(self.svfs, f)
923 return filelog.filelog(self.svfs, f)
925
924
926 def changectx(self, changeid):
925 def changectx(self, changeid):
927 return self[changeid]
926 return self[changeid]
928
927
929 def setparents(self, p1, p2=nullid):
928 def setparents(self, p1, p2=nullid):
930 self.dirstate.beginparentchange()
929 self.dirstate.beginparentchange()
931 copies = self.dirstate.setparents(p1, p2)
930 copies = self.dirstate.setparents(p1, p2)
932 pctx = self[p1]
931 pctx = self[p1]
933 if copies:
932 if copies:
934 # Adjust copy records, the dirstate cannot do it, it
933 # Adjust copy records, the dirstate cannot do it, it
935 # requires access to parents manifests. Preserve them
934 # requires access to parents manifests. Preserve them
936 # only for entries added to first parent.
935 # only for entries added to first parent.
937 for f in copies:
936 for f in copies:
938 if f not in pctx and copies[f] in pctx:
937 if f not in pctx and copies[f] in pctx:
939 self.dirstate.copy(copies[f], f)
938 self.dirstate.copy(copies[f], f)
940 if p2 == nullid:
939 if p2 == nullid:
941 for f, s in sorted(self.dirstate.copies().items()):
940 for f, s in sorted(self.dirstate.copies().items()):
942 if f not in pctx and s not in pctx:
941 if f not in pctx and s not in pctx:
943 self.dirstate.copy(None, f)
942 self.dirstate.copy(None, f)
944 self.dirstate.endparentchange()
943 self.dirstate.endparentchange()
945
944
946 def filectx(self, path, changeid=None, fileid=None):
945 def filectx(self, path, changeid=None, fileid=None):
947 """changeid can be a changeset revision, node, or tag.
946 """changeid can be a changeset revision, node, or tag.
948 fileid can be a file revision or node."""
947 fileid can be a file revision or node."""
949 return context.filectx(self, path, changeid, fileid)
948 return context.filectx(self, path, changeid, fileid)
950
949
951 def getcwd(self):
950 def getcwd(self):
952 return self.dirstate.getcwd()
951 return self.dirstate.getcwd()
953
952
954 def pathto(self, f, cwd=None):
953 def pathto(self, f, cwd=None):
955 return self.dirstate.pathto(f, cwd)
954 return self.dirstate.pathto(f, cwd)
956
955
957 def wfile(self, f, mode='r'):
956 def wfile(self, f, mode='r'):
958 return self.wvfs(f, mode)
957 return self.wvfs(f, mode)
959
958
960 def _link(self, f):
959 def _link(self, f):
961 return self.wvfs.islink(f)
960 return self.wvfs.islink(f)
962
961
963 def _loadfilter(self, filter):
962 def _loadfilter(self, filter):
964 if filter not in self.filterpats:
963 if filter not in self.filterpats:
965 l = []
964 l = []
966 for pat, cmd in self.ui.configitems(filter):
965 for pat, cmd in self.ui.configitems(filter):
967 if cmd == '!':
966 if cmd == '!':
968 continue
967 continue
969 mf = matchmod.match(self.root, '', [pat])
968 mf = matchmod.match(self.root, '', [pat])
970 fn = None
969 fn = None
971 params = cmd
970 params = cmd
972 for name, filterfn in self._datafilters.iteritems():
971 for name, filterfn in self._datafilters.iteritems():
973 if cmd.startswith(name):
972 if cmd.startswith(name):
974 fn = filterfn
973 fn = filterfn
975 params = cmd[len(name):].lstrip()
974 params = cmd[len(name):].lstrip()
976 break
975 break
977 if not fn:
976 if not fn:
978 fn = lambda s, c, **kwargs: util.filter(s, c)
977 fn = lambda s, c, **kwargs: util.filter(s, c)
979 # Wrap old filters not supporting keyword arguments
978 # Wrap old filters not supporting keyword arguments
980 if not inspect.getargspec(fn)[2]:
979 if not inspect.getargspec(fn)[2]:
981 oldfn = fn
980 oldfn = fn
982 fn = lambda s, c, **kwargs: oldfn(s, c)
981 fn = lambda s, c, **kwargs: oldfn(s, c)
983 l.append((mf, fn, params))
982 l.append((mf, fn, params))
984 self.filterpats[filter] = l
983 self.filterpats[filter] = l
985 return self.filterpats[filter]
984 return self.filterpats[filter]
986
985
987 def _filter(self, filterpats, filename, data):
986 def _filter(self, filterpats, filename, data):
988 for mf, fn, cmd in filterpats:
987 for mf, fn, cmd in filterpats:
989 if mf(filename):
988 if mf(filename):
990 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
989 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
991 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
990 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
992 break
991 break
993
992
994 return data
993 return data
995
994
996 @unfilteredpropertycache
995 @unfilteredpropertycache
997 def _encodefilterpats(self):
996 def _encodefilterpats(self):
998 return self._loadfilter('encode')
997 return self._loadfilter('encode')
999
998
1000 @unfilteredpropertycache
999 @unfilteredpropertycache
1001 def _decodefilterpats(self):
1000 def _decodefilterpats(self):
1002 return self._loadfilter('decode')
1001 return self._loadfilter('decode')
1003
1002
1004 def adddatafilter(self, name, filter):
1003 def adddatafilter(self, name, filter):
1005 self._datafilters[name] = filter
1004 self._datafilters[name] = filter
1006
1005
1007 def wread(self, filename):
1006 def wread(self, filename):
1008 if self._link(filename):
1007 if self._link(filename):
1009 data = self.wvfs.readlink(filename)
1008 data = self.wvfs.readlink(filename)
1010 else:
1009 else:
1011 data = self.wvfs.read(filename)
1010 data = self.wvfs.read(filename)
1012 return self._filter(self._encodefilterpats, filename, data)
1011 return self._filter(self._encodefilterpats, filename, data)
1013
1012
1014 def wwrite(self, filename, data, flags, backgroundclose=False):
1013 def wwrite(self, filename, data, flags, backgroundclose=False):
1015 """write ``data`` into ``filename`` in the working directory
1014 """write ``data`` into ``filename`` in the working directory
1016
1015
1017 This returns length of written (maybe decoded) data.
1016 This returns length of written (maybe decoded) data.
1018 """
1017 """
1019 data = self._filter(self._decodefilterpats, filename, data)
1018 data = self._filter(self._decodefilterpats, filename, data)
1020 if 'l' in flags:
1019 if 'l' in flags:
1021 self.wvfs.symlink(data, filename)
1020 self.wvfs.symlink(data, filename)
1022 else:
1021 else:
1023 self.wvfs.write(filename, data, backgroundclose=backgroundclose)
1022 self.wvfs.write(filename, data, backgroundclose=backgroundclose)
1024 if 'x' in flags:
1023 if 'x' in flags:
1025 self.wvfs.setflags(filename, False, True)
1024 self.wvfs.setflags(filename, False, True)
1026 return len(data)
1025 return len(data)
1027
1026
def wwritedata(self, filename, data):
    """Return ``data`` passed through the 'decode' filters for
    ``filename`` (working-directory form), without writing anything."""
    return self._filter(self._decodefilterpats, filename, data)
1030
1029
def currenttransaction(self):
    """return the current transaction or None if non exists"""
    # dereference the weakref, if one was ever installed
    tr = self._transref() if self._transref else None
    # a collected or already-finished transaction counts as "none"
    if tr and tr.running():
        return tr
    return None
1041
1040
def transaction(self, desc, report=None):
    """Open and return a new transaction named ``desc``.

    If a transaction is already running, return a nested handle on it.
    ``report``, when given, replaces ui.warn as the rollback-message
    printer.  Requires the store lock to be held."""
    if (self.ui.configbool('devel', 'all-warnings')
            or self.ui.configbool('devel', 'check-locks')):
        if self._currentlock(self._lockref) is None:
            raise error.ProgrammingError('transaction requires locking')
    tr = self.currenttransaction()
    if tr is not None:
        return tr.nest()

    # abort here if the journal already exists
    if self.svfs.exists("journal"):
        raise error.RepoError(
            _("abandoned transaction found"),
            hint=_("run 'hg recover' to clean up transaction"))

    # unique id for this transaction, passed to hooks as HG_TXNID
    idbase = "%.40f#%f" % (random.random(), time.time())
    txnid = 'TXN:' + hashlib.sha1(idbase).hexdigest()
    self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)

    self._writejournal(desc)
    renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
    if report:
        rp = report
    else:
        rp = self.ui.warn
    vfsmap = {'plain': self.vfs} # root of .hg/
    # we must avoid cyclic reference between repo and transaction.
    reporef = weakref.ref(self)
    def validate(tr):
        """will run pre-closing hooks"""
        reporef().hook('pretxnclose', throw=True,
                       txnname=desc, **tr.hookargs)
    def releasefn(tr, success):
        repo = reporef()
        if success:
            # this should be explicitly invoked here, because
            # in-memory changes aren't written out at closing
            # transaction, if tr.addfilegenerator (via
            # dirstate.write or so) isn't invoked while
            # transaction running
            repo.dirstate.write(None)
        else:
            # discard all changes (including ones already written
            # out) in this transaction
            repo.dirstate.restorebackup(None, prefix='journal.')

            repo.invalidate(clearfilecache=True)

    tr = transaction.transaction(rp, self.svfs, vfsmap,
                                 "journal",
                                 "undo",
                                 aftertrans(renames),
                                 self.store.createmode,
                                 validator=validate,
                                 releasefn=releasefn)

    tr.hookargs['txnid'] = txnid
    # note: writing the fncache only during finalize mean that the file is
    # outdated when running hooks. As fncache is used for streaming clone,
    # this is not expected to break anything that happen during the hooks.
    tr.addfinalize('flush-fncache', self.store.write)
    def txnclosehook(tr2):
        """To be run if transaction is successful, will schedule a hook run
        """
        # Don't reference tr2 in hook() so we don't hold a reference.
        # This reduces memory consumption when there are multiple
        # transactions per lock. This can likely go away if issue5045
        # fixes the function accumulation.
        hookargs = tr2.hookargs

        def hook():
            reporef().hook('txnclose', throw=False, txnname=desc,
                           **hookargs)
        reporef()._afterlock(hook)
    tr.addfinalize('txnclose-hook', txnclosehook)
    def txnaborthook(tr2):
        """To be run if transaction is aborted
        """
        reporef().hook('txnabort', throw=False, txnname=desc,
                       **tr2.hookargs)
    tr.addabort('txnabort-hook', txnaborthook)
    # avoid eager cache invalidation. in-memory data should be identical
    # to stored data if transaction has no error.
    tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
    self._transref = weakref.ref(tr)
    return tr
1128
1127
def _journalfiles(self):
    # every file participating in a transaction journal, paired with
    # the vfs it lives in (svfs = store, vfs = .hg root)
    vfs, svfs = self.vfs, self.svfs
    return ((svfs, 'journal'),
            (vfs, 'journal.dirstate'),
            (vfs, 'journal.branch'),
            (vfs, 'journal.desc'),
            (vfs, 'journal.bookmarks'),
            (svfs, 'journal.phaseroots'))
1136
1135
def undofiles(self):
    """Return (vfs, path) pairs for the undo counterparts of the
    journal files (see _journalfiles)."""
    return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1139
1138
def _writejournal(self, desc):
    """Snapshot dirstate, branch, description, bookmarks and phaseroots
    into journal.* files so the transaction can be rolled back."""
    self.dirstate.savebackup(None, prefix='journal.')
    self.vfs.write("journal.branch",
                   encoding.fromlocal(self.dirstate.branch()))
    # journal.desc: revision count at transaction start, then desc
    self.vfs.write("journal.desc",
                   "%d\n%s\n" % (len(self), desc))
    # tryread: bookmarks/phaseroots may legitimately not exist yet
    self.vfs.write("journal.bookmarks",
                   self.vfs.tryread("bookmarks"))
    self.svfs.write("journal.phaseroots",
                    self.svfs.tryread("phaseroots"))
1150
1149
def recover(self):
    """Roll back an interrupted (abandoned) transaction, if any.

    Returns True if a journal was found and rolled back, False
    otherwise."""
    with self.lock():
        if self.svfs.exists("journal"):
            self.ui.status(_("rolling back interrupted transaction\n"))
            # map journal location keys to vfs objects: '' is the
            # store, 'plain' the .hg root
            vfsmap = {'': self.svfs,
                      'plain': self.vfs,}
            transaction.rollback(self.svfs, vfsmap, "journal",
                                 self.ui.warn)
            self.invalidate()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
1164
1163
def rollback(self, dryrun=False, force=False):
    """Undo the last completed transaction (the 'undo' files).

    Returns 1 when there is no rollback information; otherwise
    delegates to _rollback (which returns 0).  Takes wlock then lock,
    in that order, to avoid the documented dead-lock hazard."""
    wlock = lock = dsguard = None
    try:
        wlock = self.wlock()
        lock = self.lock()
        if self.svfs.exists("undo"):
            # guard restores the dirstate if _rollback fails midway
            dsguard = dirstateguard.dirstateguard(self, 'rollback')

            return self._rollback(dryrun, force, dsguard)
        else:
            self.ui.warn(_("no rollback information available\n"))
            return 1
    finally:
        release(dsguard, lock, wlock)
1179
1178
@unfilteredmethod # Until we get smarter cache management
def _rollback(self, dryrun, force, dsguard):
    """Implementation of rollback(); caller holds wlock+lock and
    supplies a dirstateguard.  Returns 0 (also for dryrun)."""
    ui = self.ui
    try:
        # undo.desc: "<old revision count>\n<desc>[\n<detail>]"
        args = self.vfs.read('undo.desc').splitlines()
        (oldlen, desc, detail) = (int(args[0]), args[1], None)
        if len(args) >= 3:
            detail = args[2]
        oldtip = oldlen - 1

        if detail and ui.verbose:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s: %s)\n')
                   % (oldtip, desc, detail))
        else:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s)\n')
                   % (oldtip, desc))
    except IOError:
        # no/unreadable undo.desc: old-style or partial undo data
        msg = _('rolling back unknown transaction\n')
        desc = None

    if not force and self['.'] != self['tip'] and desc == 'commit':
        raise error.Abort(
            _('rollback of last commit while not checked out '
              'may lose data'), hint=_('use -f to force'))

    ui.status(msg)
    if dryrun:
        return 0

    parents = self.dirstate.parents()
    self.destroying()
    vfsmap = {'plain': self.vfs, '': self.svfs}
    transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
    if self.vfs.exists('undo.bookmarks'):
        self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
    if self.svfs.exists('undo.phaseroots'):
        self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
    self.invalidate()

    # only restore dirstate/branch if the rolled-back transaction
    # removed a working-directory parent from the changelog
    parentgone = (parents[0] not in self.changelog.nodemap or
                  parents[1] not in self.changelog.nodemap)
    if parentgone:
        # prevent dirstateguard from overwriting already restored one
        dsguard.close()

        self.dirstate.restorebackup(None, prefix='undo.')
        try:
            branch = self.vfs.read('undo.branch')
            self.dirstate.setbranch(encoding.tolocal(branch))
        except IOError:
            ui.warn(_('named branch could not be reset: '
                      'current branch is still \'%s\'\n')
                    % self.dirstate.branch())

        parents = tuple([p.rev() for p in self[None].parents()])
        if len(parents) > 1:
            ui.status(_('working directory now based on '
                        'revisions %d and %d\n') % parents)
        else:
            ui.status(_('working directory now based on '
                        'revision %d\n') % parents)
        mergemod.mergestate.clean(self, self['.'].node())

    # TODO: if we know which new heads may result from this rollback, pass
    # them to destroy(), which will prevent the branchhead cache from being
    # invalidated.
    self.destroyed()
    return 0
1250
1249
def invalidatecaches(self):
    """Drop in-memory tag and branch caches plus volatile sets."""
    # can't use delattr through the repoview proxy, so pop straight
    # out of __dict__ (a no-op when the cache was never built)
    self.__dict__.pop('_tagscache', None)

    self.unfiltered()._branchcaches.clear()
    self.invalidatevolatilesets()
1259
1258
def invalidatevolatilesets(self):
    # drop per-filter revision caches and obsolescence-marker caches;
    # both depend on state that other operations may have changed
    self.filteredrevcache.clear()
    obsolete.clearobscaches(self)
1263
1262
def invalidatedirstate(self):
    '''Invalidates the dirstate, causing the next call to dirstate
    to check if it was modified since the last time it was read,
    rereading it if it has.

    This is different to dirstate.invalidate() that it doesn't always
    rereads the dirstate. Use dirstate.invalidate() if you want to
    explicitly read the dirstate again (i.e. restoring it to a previous
    known good state).'''
    if hasunfilteredcache(self, 'dirstate'):
        # drop the cache entries dirstate itself holds, so the next
        # access re-checks the underlying files
        for k in self.dirstate._filecache:
            try:
                delattr(self.dirstate, k)
            except AttributeError:
                pass
        # finally drop the repo-level 'dirstate' property cache itself
        delattr(self.unfiltered(), 'dirstate')
1280
1279
def invalidate(self, clearfilecache=False):
    '''Invalidates both store and non-store parts other than dirstate

    If a transaction is running, invalidation of store is omitted,
    because discarding in-memory changes might cause inconsistency
    (e.g. incomplete fncache causes unintentional failure, but
    redundant one doesn't).
    '''
    unfiltered = self.unfiltered() # all file caches are stored unfiltered
    # snapshot the keys: with clearfilecache=True we delete entries
    # while iterating, which breaks on a live dict view (py3)
    for k in list(self._filecache.keys()):
        # dirstate is invalidated separately in invalidatedirstate()
        if k == 'dirstate':
            continue

        if clearfilecache:
            del self._filecache[k]
        try:
            delattr(unfiltered, k)
        except AttributeError:
            # cache was never materialized as an attribute
            pass
    self.invalidatecaches()
    if not self.currenttransaction():
        # TODO: Changing contents of store outside transaction
        # causes inconsistency. We should make in-memory store
        # changes detectable, and abort if changed.
        self.store.invalidatecaches()
1307
1306
def invalidateall(self):
    '''Fully invalidates both store and non-store parts, causing the
    subsequent operation to reread any outside changes.'''
    # extension should hook this to invalidate its caches
    self.invalidate()
    # dirstate is handled separately from the other filecache entries
    self.invalidatedirstate()
1314
1313
@unfilteredmethod
def _refreshfilecachestats(self, tr):
    """Reload stats of cached files so that they are flagged as valid"""
    # 'tr' is unused; the signature matches tr.addpostclose callbacks
    for k, ce in self._filecache.items():
        # dirstate is refreshed on wlock release; entries absent from
        # __dict__ were never loaded, so there is nothing to refresh
        if k == 'dirstate' or k not in self.__dict__:
            continue
        ce.refresh()
1322
1321
def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
          inheritchecker=None, parentenvvar=None):
    """Acquire ``lockname`` in ``vfs`` and return the lock object.

    First tries a non-blocking acquisition; when that fails and
    ``wait`` is true, warns the user and retries with the configured
    ui.timeout (default 600s).  Raises error.LockHeld when ``wait``
    is false and the lock is taken."""
    parentlock = None
    # the contents of parentenvvar are used by the underlying lock to
    # determine whether it can be inherited
    if parentenvvar is not None:
        parentlock = encoding.environ.get(parentenvvar)
    try:
        l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
                         acquirefn=acquirefn, desc=desc,
                         inheritchecker=inheritchecker,
                         parentlock=parentlock)
    except error.LockHeld as inst:
        if not wait:
            raise
        # show more details for new-style locks
        if ':' in inst.locker:
            host, pid = inst.locker.split(":", 1)
            self.ui.warn(
                _("waiting for lock on %s held by process %r "
                  "on host %r\n") % (desc, pid, host))
        else:
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
        # default to 600 seconds timeout
        l = lockmod.lock(vfs, lockname,
                         int(self.ui.config("ui", "timeout", "600")),
                         releasefn=releasefn, acquirefn=acquirefn,
                         desc=desc)
        self.ui.warn(_("got lock after %s seconds\n") % l.delay)
    return l
1354
1353
def _afterlock(self, callback):
    """add a callback to be run when the repository is fully unlocked

    The callback will be executed when the outermost lock is released
    (with wlock being higher level than 'lock')."""
    # wlock is checked first, being the outer of the two locks
    for ref in (self._wlockref, self._lockref):
        held = ref() if ref else None
        if held and held.held:
            held.postrelease.append(callback)
            return
    # no lock is currently held: run the callback immediately
    callback()
1367
1366
def lock(self, wait=True):
    '''Lock the repository store (.hg/store) and return a weak reference
    to the lock. Use this before modifying the store (e.g. committing or
    stripping). If you are opening a transaction, get a lock as well.)

    If both 'lock' and 'wlock' must be acquired, ensure you always acquires
    'wlock' first to avoid a dead-lock hazard.'''
    l = self._currentlock(self._lockref)
    if l is not None:
        # lock is already held: just bump its reference count
        l.lock()
        return l

    l = self._lock(self.svfs, "lock", wait, None,
                   self.invalidate, _('repository %s') % self.origroot)
    self._lockref = weakref.ref(l)
    return l
1384
1383
def _wlockchecktransaction(self):
    # used as the wlock's inheritchecker: inheriting the wlock is
    # forbidden while a transaction is running (see wlock())
    if self.currenttransaction() is not None:
        raise error.LockInheritanceContractViolation(
            'wlock cannot be inherited in the middle of a transaction')
1389
1388
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.

    Use this before modifying files in .hg.

    If both 'lock' and 'wlock' must be acquired, ensure you always acquires
    'wlock' first to avoid a dead-lock hazard.'''
    # reuse the held lock, if any, via the same helper lock() uses
    # (previously this open-coded the weakref deref and .held check)
    l = self._currentlock(self._wlockref)
    if l is not None:
        l.lock()
        return l

    # We do not need to check for non-waiting lock acquisition. Such
    # acquisition would not cause dead-lock as they would just fail.
    if wait and (self.ui.configbool('devel', 'all-warnings')
                 or self.ui.configbool('devel', 'check-locks')):
        if self._currentlock(self._lockref) is not None:
            self.ui.develwarn('"wlock" acquired after "lock"')

    def unlock():
        if self.dirstate.pendingparentchange():
            # an interrupted parent change: throw away in-memory state
            self.dirstate.invalidate()
        else:
            self.dirstate.write(None)

        # mark the dirstate cache entry valid again
        self._filecache['dirstate'].refresh()

    l = self._lock(self.vfs, "wlock", wait, unlock,
                   self.invalidatedirstate, _('working directory of %s') %
                   self.origroot,
                   inheritchecker=self._wlockchecktransaction,
                   parentenvvar='HG_WLOCK_LOCKER')
    self._wlockref = weakref.ref(l)
    return l
1425
1424
def _currentlock(self, lockref):
    """Returns the lock if it's held, or None if it's not."""
    if lockref is None:
        return None
    held = lockref()
    # the weakref may be dead, or the lock released
    if held is not None and held.held:
        return held
    return None
1434
1433
def currentwlock(self):
    """Returns the wlock if it's held, or None if it's not."""
    # thin wrapper over _currentlock for the working-directory lock
    return self._currentlock(self._wlockref)
1438
1437
1439 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1438 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1440 """
1439 """
1441 commit an individual file as part of a larger transaction
1440 commit an individual file as part of a larger transaction
1442 """
1441 """
1443
1442
1444 fname = fctx.path()
1443 fname = fctx.path()
1445 fparent1 = manifest1.get(fname, nullid)
1444 fparent1 = manifest1.get(fname, nullid)
1446 fparent2 = manifest2.get(fname, nullid)
1445 fparent2 = manifest2.get(fname, nullid)
1447 if isinstance(fctx, context.filectx):
1446 if isinstance(fctx, context.filectx):
1448 node = fctx.filenode()
1447 node = fctx.filenode()
1449 if node in [fparent1, fparent2]:
1448 if node in [fparent1, fparent2]:
1450 self.ui.debug('reusing %s filelog entry\n' % fname)
1449 self.ui.debug('reusing %s filelog entry\n' % fname)
1451 if manifest1.flags(fname) != fctx.flags():
1450 if manifest1.flags(fname) != fctx.flags():
1452 changelist.append(fname)
1451 changelist.append(fname)
1453 return node
1452 return node
1454
1453
1455 flog = self.file(fname)
1454 flog = self.file(fname)
1456 meta = {}
1455 meta = {}
1457 copy = fctx.renamed()
1456 copy = fctx.renamed()
1458 if copy and copy[0] != fname:
1457 if copy and copy[0] != fname:
1459 # Mark the new revision of this file as a copy of another
1458 # Mark the new revision of this file as a copy of another
1460 # file. This copy data will effectively act as a parent
1459 # file. This copy data will effectively act as a parent
1461 # of this new revision. If this is a merge, the first
1460 # of this new revision. If this is a merge, the first
1462 # parent will be the nullid (meaning "look up the copy data")
1461 # parent will be the nullid (meaning "look up the copy data")
1463 # and the second one will be the other parent. For example:
1462 # and the second one will be the other parent. For example:
1464 #
1463 #
1465 # 0 --- 1 --- 3 rev1 changes file foo
1464 # 0 --- 1 --- 3 rev1 changes file foo
1466 # \ / rev2 renames foo to bar and changes it
1465 # \ / rev2 renames foo to bar and changes it
1467 # \- 2 -/ rev3 should have bar with all changes and
1466 # \- 2 -/ rev3 should have bar with all changes and
1468 # should record that bar descends from
1467 # should record that bar descends from
1469 # bar in rev2 and foo in rev1
1468 # bar in rev2 and foo in rev1
1470 #
1469 #
1471 # this allows this merge to succeed:
1470 # this allows this merge to succeed:
1472 #
1471 #
1473 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1472 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1474 # \ / merging rev3 and rev4 should use bar@rev2
1473 # \ / merging rev3 and rev4 should use bar@rev2
1475 # \- 2 --- 4 as the merge base
1474 # \- 2 --- 4 as the merge base
1476 #
1475 #
1477
1476
1478 cfname = copy[0]
1477 cfname = copy[0]
1479 crev = manifest1.get(cfname)
1478 crev = manifest1.get(cfname)
1480 newfparent = fparent2
1479 newfparent = fparent2
1481
1480
1482 if manifest2: # branch merge
1481 if manifest2: # branch merge
1483 if fparent2 == nullid or crev is None: # copied on remote side
1482 if fparent2 == nullid or crev is None: # copied on remote side
1484 if cfname in manifest2:
1483 if cfname in manifest2:
1485 crev = manifest2[cfname]
1484 crev = manifest2[cfname]
1486 newfparent = fparent1
1485 newfparent = fparent1
1487
1486
1488 # Here, we used to search backwards through history to try to find
1487 # Here, we used to search backwards through history to try to find
1489 # where the file copy came from if the source of a copy was not in
1488 # where the file copy came from if the source of a copy was not in
1490 # the parent directory. However, this doesn't actually make sense to
1489 # the parent directory. However, this doesn't actually make sense to
1491 # do (what does a copy from something not in your working copy even
1490 # do (what does a copy from something not in your working copy even
1492 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
1491 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
1493 # the user that copy information was dropped, so if they didn't
1492 # the user that copy information was dropped, so if they didn't
1494 # expect this outcome it can be fixed, but this is the correct
1493 # expect this outcome it can be fixed, but this is the correct
1495 # behavior in this circumstance.
1494 # behavior in this circumstance.
1496
1495
1497 if crev:
1496 if crev:
1498 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1497 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1499 meta["copy"] = cfname
1498 meta["copy"] = cfname
1500 meta["copyrev"] = hex(crev)
1499 meta["copyrev"] = hex(crev)
1501 fparent1, fparent2 = nullid, newfparent
1500 fparent1, fparent2 = nullid, newfparent
1502 else:
1501 else:
1503 self.ui.warn(_("warning: can't find ancestor for '%s' "
1502 self.ui.warn(_("warning: can't find ancestor for '%s' "
1504 "copied from '%s'!\n") % (fname, cfname))
1503 "copied from '%s'!\n") % (fname, cfname))
1505
1504
1506 elif fparent1 == nullid:
1505 elif fparent1 == nullid:
1507 fparent1, fparent2 = fparent2, nullid
1506 fparent1, fparent2 = fparent2, nullid
1508 elif fparent2 != nullid:
1507 elif fparent2 != nullid:
1509 # is one parent an ancestor of the other?
1508 # is one parent an ancestor of the other?
1510 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1509 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1511 if fparent1 in fparentancestors:
1510 if fparent1 in fparentancestors:
1512 fparent1, fparent2 = fparent2, nullid
1511 fparent1, fparent2 = fparent2, nullid
1513 elif fparent2 in fparentancestors:
1512 elif fparent2 in fparentancestors:
1514 fparent2 = nullid
1513 fparent2 = nullid
1515
1514
1516 # is the file changed?
1515 # is the file changed?
1517 text = fctx.data()
1516 text = fctx.data()
1518 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1517 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1519 changelist.append(fname)
1518 changelist.append(fname)
1520 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1519 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1521 # are just the flags changed during merge?
1520 # are just the flags changed during merge?
1522 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1521 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1523 changelist.append(fname)
1522 changelist.append(fname)
1524
1523
1525 return fparent1
1524 return fparent1
1526
1525
1527 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
1526 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
1528 """check for commit arguments that aren't committable"""
1527 """check for commit arguments that aren't committable"""
1529 if match.isexact() or match.prefix():
1528 if match.isexact() or match.prefix():
1530 matched = set(status.modified + status.added + status.removed)
1529 matched = set(status.modified + status.added + status.removed)
1531
1530
1532 for f in match.files():
1531 for f in match.files():
1533 f = self.dirstate.normalize(f)
1532 f = self.dirstate.normalize(f)
1534 if f == '.' or f in matched or f in wctx.substate:
1533 if f == '.' or f in matched or f in wctx.substate:
1535 continue
1534 continue
1536 if f in status.deleted:
1535 if f in status.deleted:
1537 fail(f, _('file not found!'))
1536 fail(f, _('file not found!'))
1538 if f in vdirs: # visited directory
1537 if f in vdirs: # visited directory
1539 d = f + '/'
1538 d = f + '/'
1540 for mf in matched:
1539 for mf in matched:
1541 if mf.startswith(d):
1540 if mf.startswith(d):
1542 break
1541 break
1543 else:
1542 else:
1544 fail(f, _("no match under directory!"))
1543 fail(f, _("no match under directory!"))
1545 elif f not in self.dirstate:
1544 elif f not in self.dirstate:
1546 fail(f, _("file not tracked!"))
1545 fail(f, _("file not tracked!"))
1547
1546
1548 @unfilteredmethod
1547 @unfilteredmethod
1549 def commit(self, text="", user=None, date=None, match=None, force=False,
1548 def commit(self, text="", user=None, date=None, match=None, force=False,
1550 editor=False, extra=None):
1549 editor=False, extra=None):
1551 """Add a new revision to current repository.
1550 """Add a new revision to current repository.
1552
1551
1553 Revision information is gathered from the working directory,
1552 Revision information is gathered from the working directory,
1554 match can be used to filter the committed files. If editor is
1553 match can be used to filter the committed files. If editor is
1555 supplied, it is called to get a commit message.
1554 supplied, it is called to get a commit message.
1556 """
1555 """
1557 if extra is None:
1556 if extra is None:
1558 extra = {}
1557 extra = {}
1559
1558
1560 def fail(f, msg):
1559 def fail(f, msg):
1561 raise error.Abort('%s: %s' % (f, msg))
1560 raise error.Abort('%s: %s' % (f, msg))
1562
1561
1563 if not match:
1562 if not match:
1564 match = matchmod.always(self.root, '')
1563 match = matchmod.always(self.root, '')
1565
1564
1566 if not force:
1565 if not force:
1567 vdirs = []
1566 vdirs = []
1568 match.explicitdir = vdirs.append
1567 match.explicitdir = vdirs.append
1569 match.bad = fail
1568 match.bad = fail
1570
1569
1571 wlock = lock = tr = None
1570 wlock = lock = tr = None
1572 try:
1571 try:
1573 wlock = self.wlock()
1572 wlock = self.wlock()
1574 lock = self.lock() # for recent changelog (see issue4368)
1573 lock = self.lock() # for recent changelog (see issue4368)
1575
1574
1576 wctx = self[None]
1575 wctx = self[None]
1577 merge = len(wctx.parents()) > 1
1576 merge = len(wctx.parents()) > 1
1578
1577
1579 if not force and merge and match.ispartial():
1578 if not force and merge and match.ispartial():
1580 raise error.Abort(_('cannot partially commit a merge '
1579 raise error.Abort(_('cannot partially commit a merge '
1581 '(do not specify files or patterns)'))
1580 '(do not specify files or patterns)'))
1582
1581
1583 status = self.status(match=match, clean=force)
1582 status = self.status(match=match, clean=force)
1584 if force:
1583 if force:
1585 status.modified.extend(status.clean) # mq may commit clean files
1584 status.modified.extend(status.clean) # mq may commit clean files
1586
1585
1587 # check subrepos
1586 # check subrepos
1588 subs = []
1587 subs = []
1589 commitsubs = set()
1588 commitsubs = set()
1590 newstate = wctx.substate.copy()
1589 newstate = wctx.substate.copy()
1591 # only manage subrepos and .hgsubstate if .hgsub is present
1590 # only manage subrepos and .hgsubstate if .hgsub is present
1592 if '.hgsub' in wctx:
1591 if '.hgsub' in wctx:
1593 # we'll decide whether to track this ourselves, thanks
1592 # we'll decide whether to track this ourselves, thanks
1594 for c in status.modified, status.added, status.removed:
1593 for c in status.modified, status.added, status.removed:
1595 if '.hgsubstate' in c:
1594 if '.hgsubstate' in c:
1596 c.remove('.hgsubstate')
1595 c.remove('.hgsubstate')
1597
1596
1598 # compare current state to last committed state
1597 # compare current state to last committed state
1599 # build new substate based on last committed state
1598 # build new substate based on last committed state
1600 oldstate = wctx.p1().substate
1599 oldstate = wctx.p1().substate
1601 for s in sorted(newstate.keys()):
1600 for s in sorted(newstate.keys()):
1602 if not match(s):
1601 if not match(s):
1603 # ignore working copy, use old state if present
1602 # ignore working copy, use old state if present
1604 if s in oldstate:
1603 if s in oldstate:
1605 newstate[s] = oldstate[s]
1604 newstate[s] = oldstate[s]
1606 continue
1605 continue
1607 if not force:
1606 if not force:
1608 raise error.Abort(
1607 raise error.Abort(
1609 _("commit with new subrepo %s excluded") % s)
1608 _("commit with new subrepo %s excluded") % s)
1610 dirtyreason = wctx.sub(s).dirtyreason(True)
1609 dirtyreason = wctx.sub(s).dirtyreason(True)
1611 if dirtyreason:
1610 if dirtyreason:
1612 if not self.ui.configbool('ui', 'commitsubrepos'):
1611 if not self.ui.configbool('ui', 'commitsubrepos'):
1613 raise error.Abort(dirtyreason,
1612 raise error.Abort(dirtyreason,
1614 hint=_("use --subrepos for recursive commit"))
1613 hint=_("use --subrepos for recursive commit"))
1615 subs.append(s)
1614 subs.append(s)
1616 commitsubs.add(s)
1615 commitsubs.add(s)
1617 else:
1616 else:
1618 bs = wctx.sub(s).basestate()
1617 bs = wctx.sub(s).basestate()
1619 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1618 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1620 if oldstate.get(s, (None, None, None))[1] != bs:
1619 if oldstate.get(s, (None, None, None))[1] != bs:
1621 subs.append(s)
1620 subs.append(s)
1622
1621
1623 # check for removed subrepos
1622 # check for removed subrepos
1624 for p in wctx.parents():
1623 for p in wctx.parents():
1625 r = [s for s in p.substate if s not in newstate]
1624 r = [s for s in p.substate if s not in newstate]
1626 subs += [s for s in r if match(s)]
1625 subs += [s for s in r if match(s)]
1627 if subs:
1626 if subs:
1628 if (not match('.hgsub') and
1627 if (not match('.hgsub') and
1629 '.hgsub' in (wctx.modified() + wctx.added())):
1628 '.hgsub' in (wctx.modified() + wctx.added())):
1630 raise error.Abort(
1629 raise error.Abort(
1631 _("can't commit subrepos without .hgsub"))
1630 _("can't commit subrepos without .hgsub"))
1632 status.modified.insert(0, '.hgsubstate')
1631 status.modified.insert(0, '.hgsubstate')
1633
1632
1634 elif '.hgsub' in status.removed:
1633 elif '.hgsub' in status.removed:
1635 # clean up .hgsubstate when .hgsub is removed
1634 # clean up .hgsubstate when .hgsub is removed
1636 if ('.hgsubstate' in wctx and
1635 if ('.hgsubstate' in wctx and
1637 '.hgsubstate' not in (status.modified + status.added +
1636 '.hgsubstate' not in (status.modified + status.added +
1638 status.removed)):
1637 status.removed)):
1639 status.removed.insert(0, '.hgsubstate')
1638 status.removed.insert(0, '.hgsubstate')
1640
1639
1641 # make sure all explicit patterns are matched
1640 # make sure all explicit patterns are matched
1642 if not force:
1641 if not force:
1643 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
1642 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
1644
1643
1645 cctx = context.workingcommitctx(self, status,
1644 cctx = context.workingcommitctx(self, status,
1646 text, user, date, extra)
1645 text, user, date, extra)
1647
1646
1648 # internal config: ui.allowemptycommit
1647 # internal config: ui.allowemptycommit
1649 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1648 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1650 or extra.get('close') or merge or cctx.files()
1649 or extra.get('close') or merge or cctx.files()
1651 or self.ui.configbool('ui', 'allowemptycommit'))
1650 or self.ui.configbool('ui', 'allowemptycommit'))
1652 if not allowemptycommit:
1651 if not allowemptycommit:
1653 return None
1652 return None
1654
1653
1655 if merge and cctx.deleted():
1654 if merge and cctx.deleted():
1656 raise error.Abort(_("cannot commit merge with missing files"))
1655 raise error.Abort(_("cannot commit merge with missing files"))
1657
1656
1658 ms = mergemod.mergestate.read(self)
1657 ms = mergemod.mergestate.read(self)
1659 mergeutil.checkunresolved(ms)
1658 mergeutil.checkunresolved(ms)
1660
1659
1661 if editor:
1660 if editor:
1662 cctx._text = editor(self, cctx, subs)
1661 cctx._text = editor(self, cctx, subs)
1663 edited = (text != cctx._text)
1662 edited = (text != cctx._text)
1664
1663
1665 # Save commit message in case this transaction gets rolled back
1664 # Save commit message in case this transaction gets rolled back
1666 # (e.g. by a pretxncommit hook). Leave the content alone on
1665 # (e.g. by a pretxncommit hook). Leave the content alone on
1667 # the assumption that the user will use the same editor again.
1666 # the assumption that the user will use the same editor again.
1668 msgfn = self.savecommitmessage(cctx._text)
1667 msgfn = self.savecommitmessage(cctx._text)
1669
1668
1670 # commit subs and write new state
1669 # commit subs and write new state
1671 if subs:
1670 if subs:
1672 for s in sorted(commitsubs):
1671 for s in sorted(commitsubs):
1673 sub = wctx.sub(s)
1672 sub = wctx.sub(s)
1674 self.ui.status(_('committing subrepository %s\n') %
1673 self.ui.status(_('committing subrepository %s\n') %
1675 subrepo.subrelpath(sub))
1674 subrepo.subrelpath(sub))
1676 sr = sub.commit(cctx._text, user, date)
1675 sr = sub.commit(cctx._text, user, date)
1677 newstate[s] = (newstate[s][0], sr)
1676 newstate[s] = (newstate[s][0], sr)
1678 subrepo.writestate(self, newstate)
1677 subrepo.writestate(self, newstate)
1679
1678
1680 p1, p2 = self.dirstate.parents()
1679 p1, p2 = self.dirstate.parents()
1681 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1680 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1682 try:
1681 try:
1683 self.hook("precommit", throw=True, parent1=hookp1,
1682 self.hook("precommit", throw=True, parent1=hookp1,
1684 parent2=hookp2)
1683 parent2=hookp2)
1685 tr = self.transaction('commit')
1684 tr = self.transaction('commit')
1686 ret = self.commitctx(cctx, True)
1685 ret = self.commitctx(cctx, True)
1687 except: # re-raises
1686 except: # re-raises
1688 if edited:
1687 if edited:
1689 self.ui.write(
1688 self.ui.write(
1690 _('note: commit message saved in %s\n') % msgfn)
1689 _('note: commit message saved in %s\n') % msgfn)
1691 raise
1690 raise
1692 # update bookmarks, dirstate and mergestate
1691 # update bookmarks, dirstate and mergestate
1693 bookmarks.update(self, [p1, p2], ret)
1692 bookmarks.update(self, [p1, p2], ret)
1694 cctx.markcommitted(ret)
1693 cctx.markcommitted(ret)
1695 ms.reset()
1694 ms.reset()
1696 tr.close()
1695 tr.close()
1697
1696
1698 finally:
1697 finally:
1699 lockmod.release(tr, lock, wlock)
1698 lockmod.release(tr, lock, wlock)
1700
1699
1701 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1700 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1702 # hack for command that use a temporary commit (eg: histedit)
1701 # hack for command that use a temporary commit (eg: histedit)
1703 # temporary commit got stripped before hook release
1702 # temporary commit got stripped before hook release
1704 if self.changelog.hasnode(ret):
1703 if self.changelog.hasnode(ret):
1705 self.hook("commit", node=node, parent1=parent1,
1704 self.hook("commit", node=node, parent1=parent1,
1706 parent2=parent2)
1705 parent2=parent2)
1707 self._afterlock(commithook)
1706 self._afterlock(commithook)
1708 return ret
1707 return ret
1709
1708
1710 @unfilteredmethod
1709 @unfilteredmethod
1711 def commitctx(self, ctx, error=False):
1710 def commitctx(self, ctx, error=False):
1712 """Add a new revision to current repository.
1711 """Add a new revision to current repository.
1713 Revision information is passed via the context argument.
1712 Revision information is passed via the context argument.
1714 """
1713 """
1715
1714
1716 tr = None
1715 tr = None
1717 p1, p2 = ctx.p1(), ctx.p2()
1716 p1, p2 = ctx.p1(), ctx.p2()
1718 user = ctx.user()
1717 user = ctx.user()
1719
1718
1720 lock = self.lock()
1719 lock = self.lock()
1721 try:
1720 try:
1722 tr = self.transaction("commit")
1721 tr = self.transaction("commit")
1723 trp = weakref.proxy(tr)
1722 trp = weakref.proxy(tr)
1724
1723
1725 if ctx.manifestnode():
1724 if ctx.manifestnode():
1726 # reuse an existing manifest revision
1725 # reuse an existing manifest revision
1727 mn = ctx.manifestnode()
1726 mn = ctx.manifestnode()
1728 files = ctx.files()
1727 files = ctx.files()
1729 elif ctx.files():
1728 elif ctx.files():
1730 m1ctx = p1.manifestctx()
1729 m1ctx = p1.manifestctx()
1731 m2ctx = p2.manifestctx()
1730 m2ctx = p2.manifestctx()
1732 mctx = m1ctx.copy()
1731 mctx = m1ctx.copy()
1733
1732
1734 m = mctx.read()
1733 m = mctx.read()
1735 m1 = m1ctx.read()
1734 m1 = m1ctx.read()
1736 m2 = m2ctx.read()
1735 m2 = m2ctx.read()
1737
1736
1738 # check in files
1737 # check in files
1739 added = []
1738 added = []
1740 changed = []
1739 changed = []
1741 removed = list(ctx.removed())
1740 removed = list(ctx.removed())
1742 linkrev = len(self)
1741 linkrev = len(self)
1743 self.ui.note(_("committing files:\n"))
1742 self.ui.note(_("committing files:\n"))
1744 for f in sorted(ctx.modified() + ctx.added()):
1743 for f in sorted(ctx.modified() + ctx.added()):
1745 self.ui.note(f + "\n")
1744 self.ui.note(f + "\n")
1746 try:
1745 try:
1747 fctx = ctx[f]
1746 fctx = ctx[f]
1748 if fctx is None:
1747 if fctx is None:
1749 removed.append(f)
1748 removed.append(f)
1750 else:
1749 else:
1751 added.append(f)
1750 added.append(f)
1752 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1751 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1753 trp, changed)
1752 trp, changed)
1754 m.setflag(f, fctx.flags())
1753 m.setflag(f, fctx.flags())
1755 except OSError as inst:
1754 except OSError as inst:
1756 self.ui.warn(_("trouble committing %s!\n") % f)
1755 self.ui.warn(_("trouble committing %s!\n") % f)
1757 raise
1756 raise
1758 except IOError as inst:
1757 except IOError as inst:
1759 errcode = getattr(inst, 'errno', errno.ENOENT)
1758 errcode = getattr(inst, 'errno', errno.ENOENT)
1760 if error or errcode and errcode != errno.ENOENT:
1759 if error or errcode and errcode != errno.ENOENT:
1761 self.ui.warn(_("trouble committing %s!\n") % f)
1760 self.ui.warn(_("trouble committing %s!\n") % f)
1762 raise
1761 raise
1763
1762
1764 # update manifest
1763 # update manifest
1765 self.ui.note(_("committing manifest\n"))
1764 self.ui.note(_("committing manifest\n"))
1766 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1765 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1767 drop = [f for f in removed if f in m]
1766 drop = [f for f in removed if f in m]
1768 for f in drop:
1767 for f in drop:
1769 del m[f]
1768 del m[f]
1770 mn = mctx.write(trp, linkrev,
1769 mn = mctx.write(trp, linkrev,
1771 p1.manifestnode(), p2.manifestnode(),
1770 p1.manifestnode(), p2.manifestnode(),
1772 added, drop)
1771 added, drop)
1773 files = changed + removed
1772 files = changed + removed
1774 else:
1773 else:
1775 mn = p1.manifestnode()
1774 mn = p1.manifestnode()
1776 files = []
1775 files = []
1777
1776
1778 # update changelog
1777 # update changelog
1779 self.ui.note(_("committing changelog\n"))
1778 self.ui.note(_("committing changelog\n"))
1780 self.changelog.delayupdate(tr)
1779 self.changelog.delayupdate(tr)
1781 n = self.changelog.add(mn, files, ctx.description(),
1780 n = self.changelog.add(mn, files, ctx.description(),
1782 trp, p1.node(), p2.node(),
1781 trp, p1.node(), p2.node(),
1783 user, ctx.date(), ctx.extra().copy())
1782 user, ctx.date(), ctx.extra().copy())
1784 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1783 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1785 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1784 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1786 parent2=xp2)
1785 parent2=xp2)
1787 # set the new commit is proper phase
1786 # set the new commit is proper phase
1788 targetphase = subrepo.newcommitphase(self.ui, ctx)
1787 targetphase = subrepo.newcommitphase(self.ui, ctx)
1789 if targetphase:
1788 if targetphase:
1790 # retract boundary do not alter parent changeset.
1789 # retract boundary do not alter parent changeset.
1791 # if a parent have higher the resulting phase will
1790 # if a parent have higher the resulting phase will
1792 # be compliant anyway
1791 # be compliant anyway
1793 #
1792 #
1794 # if minimal phase was 0 we don't need to retract anything
1793 # if minimal phase was 0 we don't need to retract anything
1795 phases.retractboundary(self, tr, targetphase, [n])
1794 phases.retractboundary(self, tr, targetphase, [n])
1796 tr.close()
1795 tr.close()
1797 branchmap.updatecache(self.filtered('served'))
1796 branchmap.updatecache(self.filtered('served'))
1798 return n
1797 return n
1799 finally:
1798 finally:
1800 if tr:
1799 if tr:
1801 tr.release()
1800 tr.release()
1802 lock.release()
1801 lock.release()
1803
1802
1804 @unfilteredmethod
1803 @unfilteredmethod
1805 def destroying(self):
1804 def destroying(self):
1806 '''Inform the repository that nodes are about to be destroyed.
1805 '''Inform the repository that nodes are about to be destroyed.
1807 Intended for use by strip and rollback, so there's a common
1806 Intended for use by strip and rollback, so there's a common
1808 place for anything that has to be done before destroying history.
1807 place for anything that has to be done before destroying history.
1809
1808
1810 This is mostly useful for saving state that is in memory and waiting
1809 This is mostly useful for saving state that is in memory and waiting
1811 to be flushed when the current lock is released. Because a call to
1810 to be flushed when the current lock is released. Because a call to
1812 destroyed is imminent, the repo will be invalidated causing those
1811 destroyed is imminent, the repo will be invalidated causing those
1813 changes to stay in memory (waiting for the next unlock), or vanish
1812 changes to stay in memory (waiting for the next unlock), or vanish
1814 completely.
1813 completely.
1815 '''
1814 '''
1816 # When using the same lock to commit and strip, the phasecache is left
1815 # When using the same lock to commit and strip, the phasecache is left
1817 # dirty after committing. Then when we strip, the repo is invalidated,
1816 # dirty after committing. Then when we strip, the repo is invalidated,
1818 # causing those changes to disappear.
1817 # causing those changes to disappear.
1819 if '_phasecache' in vars(self):
1818 if '_phasecache' in vars(self):
1820 self._phasecache.write()
1819 self._phasecache.write()
1821
1820
1822 @unfilteredmethod
1821 @unfilteredmethod
1823 def destroyed(self):
1822 def destroyed(self):
1824 '''Inform the repository that nodes have been destroyed.
1823 '''Inform the repository that nodes have been destroyed.
1825 Intended for use by strip and rollback, so there's a common
1824 Intended for use by strip and rollback, so there's a common
1826 place for anything that has to be done after destroying history.
1825 place for anything that has to be done after destroying history.
1827 '''
1826 '''
1828 # When one tries to:
1827 # When one tries to:
1829 # 1) destroy nodes thus calling this method (e.g. strip)
1828 # 1) destroy nodes thus calling this method (e.g. strip)
1830 # 2) use phasecache somewhere (e.g. commit)
1829 # 2) use phasecache somewhere (e.g. commit)
1831 #
1830 #
1832 # then 2) will fail because the phasecache contains nodes that were
1831 # then 2) will fail because the phasecache contains nodes that were
1833 # removed. We can either remove phasecache from the filecache,
1832 # removed. We can either remove phasecache from the filecache,
1834 # causing it to reload next time it is accessed, or simply filter
1833 # causing it to reload next time it is accessed, or simply filter
1835 # the removed nodes now and write the updated cache.
1834 # the removed nodes now and write the updated cache.
1836 self._phasecache.filterunknown(self)
1835 self._phasecache.filterunknown(self)
1837 self._phasecache.write()
1836 self._phasecache.write()
1838
1837
1839 # update the 'served' branch cache to help read only server process
1838 # update the 'served' branch cache to help read only server process
1840 # Thanks to branchcache collaboration this is done from the nearest
1839 # Thanks to branchcache collaboration this is done from the nearest
1841 # filtered subset and it is expected to be fast.
1840 # filtered subset and it is expected to be fast.
1842 branchmap.updatecache(self.filtered('served'))
1841 branchmap.updatecache(self.filtered('served'))
1843
1842
1844 # Ensure the persistent tag cache is updated. Doing it now
1843 # Ensure the persistent tag cache is updated. Doing it now
1845 # means that the tag cache only has to worry about destroyed
1844 # means that the tag cache only has to worry about destroyed
1846 # heads immediately after a strip/rollback. That in turn
1845 # heads immediately after a strip/rollback. That in turn
1847 # guarantees that "cachetip == currenttip" (comparing both rev
1846 # guarantees that "cachetip == currenttip" (comparing both rev
1848 # and node) always means no nodes have been added or destroyed.
1847 # and node) always means no nodes have been added or destroyed.
1849
1848
1850 # XXX this is suboptimal when qrefresh'ing: we strip the current
1849 # XXX this is suboptimal when qrefresh'ing: we strip the current
1851 # head, refresh the tag cache, then immediately add a new head.
1850 # head, refresh the tag cache, then immediately add a new head.
1852 # But I think doing it this way is necessary for the "instant
1851 # But I think doing it this way is necessary for the "instant
1853 # tag cache retrieval" case to work.
1852 # tag cache retrieval" case to work.
1854 self.invalidate()
1853 self.invalidate()
1855
1854
1856 def walk(self, match, node=None):
1855 def walk(self, match, node=None):
1857 '''
1856 '''
1858 walk recursively through the directory tree or a given
1857 walk recursively through the directory tree or a given
1859 changeset, finding all files matched by the match
1858 changeset, finding all files matched by the match
1860 function
1859 function
1861 '''
1860 '''
1862 return self[node].walk(match)
1861 return self[node].walk(match)
1863
1862
1864 def status(self, node1='.', node2=None, match=None,
1863 def status(self, node1='.', node2=None, match=None,
1865 ignored=False, clean=False, unknown=False,
1864 ignored=False, clean=False, unknown=False,
1866 listsubrepos=False):
1865 listsubrepos=False):
1867 '''a convenience method that calls node1.status(node2)'''
1866 '''a convenience method that calls node1.status(node2)'''
1868 return self[node1].status(node2, match, ignored, clean, unknown,
1867 return self[node1].status(node2, match, ignored, clean, unknown,
1869 listsubrepos)
1868 listsubrepos)
1870
1869
1871 def heads(self, start=None):
1870 def heads(self, start=None):
1872 if start is None:
1871 if start is None:
1873 cl = self.changelog
1872 cl = self.changelog
1874 headrevs = reversed(cl.headrevs())
1873 headrevs = reversed(cl.headrevs())
1875 return [cl.node(rev) for rev in headrevs]
1874 return [cl.node(rev) for rev in headrevs]
1876
1875
1877 heads = self.changelog.heads(start)
1876 heads = self.changelog.heads(start)
1878 # sort the output in rev descending order
1877 # sort the output in rev descending order
1879 return sorted(heads, key=self.changelog.rev, reverse=True)
1878 return sorted(heads, key=self.changelog.rev, reverse=True)
1880
1879
1881 def branchheads(self, branch=None, start=None, closed=False):
1880 def branchheads(self, branch=None, start=None, closed=False):
1882 '''return a (possibly filtered) list of heads for the given branch
1881 '''return a (possibly filtered) list of heads for the given branch
1883
1882
1884 Heads are returned in topological order, from newest to oldest.
1883 Heads are returned in topological order, from newest to oldest.
1885 If branch is None, use the dirstate branch.
1884 If branch is None, use the dirstate branch.
1886 If start is not None, return only heads reachable from start.
1885 If start is not None, return only heads reachable from start.
1887 If closed is True, return heads that are marked as closed as well.
1886 If closed is True, return heads that are marked as closed as well.
1888 '''
1887 '''
1889 if branch is None:
1888 if branch is None:
1890 branch = self[None].branch()
1889 branch = self[None].branch()
1891 branches = self.branchmap()
1890 branches = self.branchmap()
1892 if branch not in branches:
1891 if branch not in branches:
1893 return []
1892 return []
1894 # the cache returns heads ordered lowest to highest
1893 # the cache returns heads ordered lowest to highest
1895 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1894 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1896 if start is not None:
1895 if start is not None:
1897 # filter out the heads that cannot be reached from startrev
1896 # filter out the heads that cannot be reached from startrev
1898 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1897 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1899 bheads = [h for h in bheads if h in fbheads]
1898 bheads = [h for h in bheads if h in fbheads]
1900 return bheads
1899 return bheads
1901
1900
1902 def branches(self, nodes):
1901 def branches(self, nodes):
1903 if not nodes:
1902 if not nodes:
1904 nodes = [self.changelog.tip()]
1903 nodes = [self.changelog.tip()]
1905 b = []
1904 b = []
1906 for n in nodes:
1905 for n in nodes:
1907 t = n
1906 t = n
1908 while True:
1907 while True:
1909 p = self.changelog.parents(n)
1908 p = self.changelog.parents(n)
1910 if p[1] != nullid or p[0] == nullid:
1909 if p[1] != nullid or p[0] == nullid:
1911 b.append((t, n, p[0], p[1]))
1910 b.append((t, n, p[0], p[1]))
1912 break
1911 break
1913 n = p[0]
1912 n = p[0]
1914 return b
1913 return b
1915
1914
1916 def between(self, pairs):
1915 def between(self, pairs):
1917 r = []
1916 r = []
1918
1917
1919 for top, bottom in pairs:
1918 for top, bottom in pairs:
1920 n, l, i = top, [], 0
1919 n, l, i = top, [], 0
1921 f = 1
1920 f = 1
1922
1921
1923 while n != bottom and n != nullid:
1922 while n != bottom and n != nullid:
1924 p = self.changelog.parents(n)[0]
1923 p = self.changelog.parents(n)[0]
1925 if i == f:
1924 if i == f:
1926 l.append(n)
1925 l.append(n)
1927 f = f * 2
1926 f = f * 2
1928 n = p
1927 n = p
1929 i += 1
1928 i += 1
1930
1929
1931 r.append(l)
1930 r.append(l)
1932
1931
1933 return r
1932 return r
1934
1933
1935 def checkpush(self, pushop):
1934 def checkpush(self, pushop):
1936 """Extensions can override this function if additional checks have
1935 """Extensions can override this function if additional checks have
1937 to be performed before pushing, or call it if they override push
1936 to be performed before pushing, or call it if they override push
1938 command.
1937 command.
1939 """
1938 """
1940 pass
1939 pass
1941
1940
1942 @unfilteredpropertycache
1941 @unfilteredpropertycache
1943 def prepushoutgoinghooks(self):
1942 def prepushoutgoinghooks(self):
1944 """Return util.hooks consists of a pushop with repo, remote, outgoing
1943 """Return util.hooks consists of a pushop with repo, remote, outgoing
1945 methods, which are called before pushing changesets.
1944 methods, which are called before pushing changesets.
1946 """
1945 """
1947 return util.hooks()
1946 return util.hooks()
1948
1947
1949 def pushkey(self, namespace, key, old, new):
1948 def pushkey(self, namespace, key, old, new):
1950 try:
1949 try:
1951 tr = self.currenttransaction()
1950 tr = self.currenttransaction()
1952 hookargs = {}
1951 hookargs = {}
1953 if tr is not None:
1952 if tr is not None:
1954 hookargs.update(tr.hookargs)
1953 hookargs.update(tr.hookargs)
1955 hookargs['namespace'] = namespace
1954 hookargs['namespace'] = namespace
1956 hookargs['key'] = key
1955 hookargs['key'] = key
1957 hookargs['old'] = old
1956 hookargs['old'] = old
1958 hookargs['new'] = new
1957 hookargs['new'] = new
1959 self.hook('prepushkey', throw=True, **hookargs)
1958 self.hook('prepushkey', throw=True, **hookargs)
1960 except error.HookAbort as exc:
1959 except error.HookAbort as exc:
1961 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1960 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1962 if exc.hint:
1961 if exc.hint:
1963 self.ui.write_err(_("(%s)\n") % exc.hint)
1962 self.ui.write_err(_("(%s)\n") % exc.hint)
1964 return False
1963 return False
1965 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1964 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1966 ret = pushkey.push(self, namespace, key, old, new)
1965 ret = pushkey.push(self, namespace, key, old, new)
1967 def runhook():
1966 def runhook():
1968 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1967 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1969 ret=ret)
1968 ret=ret)
1970 self._afterlock(runhook)
1969 self._afterlock(runhook)
1971 return ret
1970 return ret
1972
1971
1973 def listkeys(self, namespace):
1972 def listkeys(self, namespace):
1974 self.hook('prelistkeys', throw=True, namespace=namespace)
1973 self.hook('prelistkeys', throw=True, namespace=namespace)
1975 self.ui.debug('listing keys for "%s"\n' % namespace)
1974 self.ui.debug('listing keys for "%s"\n' % namespace)
1976 values = pushkey.list(self, namespace)
1975 values = pushkey.list(self, namespace)
1977 self.hook('listkeys', namespace=namespace, values=values)
1976 self.hook('listkeys', namespace=namespace, values=values)
1978 return values
1977 return values
1979
1978
1980 def debugwireargs(self, one, two, three=None, four=None, five=None):
1979 def debugwireargs(self, one, two, three=None, four=None, five=None):
1981 '''used to test argument passing over the wire'''
1980 '''used to test argument passing over the wire'''
1982 return "%s %s %s %s %s" % (one, two, three, four, five)
1981 return "%s %s %s %s %s" % (one, two, three, four, five)
1983
1982
1984 def savecommitmessage(self, text):
1983 def savecommitmessage(self, text):
1985 fp = self.vfs('last-message.txt', 'wb')
1984 fp = self.vfs('last-message.txt', 'wb')
1986 try:
1985 try:
1987 fp.write(text)
1986 fp.write(text)
1988 finally:
1987 finally:
1989 fp.close()
1988 fp.close()
1990 return self.pathto(fp.name[len(self.root) + 1:])
1989 return self.pathto(fp.name[len(self.root) + 1:])
1991
1990
1992 # used to avoid circular references so destructors work
1991 # used to avoid circular references so destructors work
1993 def aftertrans(files):
1992 def aftertrans(files):
1994 renamefiles = [tuple(t) for t in files]
1993 renamefiles = [tuple(t) for t in files]
1995 def a():
1994 def a():
1996 for vfs, src, dest in renamefiles:
1995 for vfs, src, dest in renamefiles:
1997 try:
1996 try:
1998 vfs.rename(src, dest)
1997 vfs.rename(src, dest)
1999 except OSError: # journal file does not yet exist
1998 except OSError: # journal file does not yet exist
2000 pass
1999 pass
2001 return a
2000 return a
2002
2001
2003 def undoname(fn):
2002 def undoname(fn):
2004 base, name = os.path.split(fn)
2003 base, name = os.path.split(fn)
2005 assert name.startswith('journal')
2004 assert name.startswith('journal')
2006 return os.path.join(base, name.replace('journal', 'undo', 1))
2005 return os.path.join(base, name.replace('journal', 'undo', 1))
2007
2006
2008 def instance(ui, path, create):
2007 def instance(ui, path, create):
2009 return localrepository(ui, util.urllocalpath(path), create)
2008 return localrepository(ui, util.urllocalpath(path), create)
2010
2009
2011 def islocal(path):
2010 def islocal(path):
2012 return True
2011 return True
2013
2012
2014 def newreporequirements(repo):
2013 def newreporequirements(repo):
2015 """Determine the set of requirements for a new local repository.
2014 """Determine the set of requirements for a new local repository.
2016
2015
2017 Extensions can wrap this function to specify custom requirements for
2016 Extensions can wrap this function to specify custom requirements for
2018 new repositories.
2017 new repositories.
2019 """
2018 """
2020 ui = repo.ui
2019 ui = repo.ui
2021 requirements = set(['revlogv1'])
2020 requirements = set(['revlogv1'])
2022 if ui.configbool('format', 'usestore', True):
2021 if ui.configbool('format', 'usestore', True):
2023 requirements.add('store')
2022 requirements.add('store')
2024 if ui.configbool('format', 'usefncache', True):
2023 if ui.configbool('format', 'usefncache', True):
2025 requirements.add('fncache')
2024 requirements.add('fncache')
2026 if ui.configbool('format', 'dotencode', True):
2025 if ui.configbool('format', 'dotencode', True):
2027 requirements.add('dotencode')
2026 requirements.add('dotencode')
2028
2027
2029 compengine = ui.config('experimental', 'format.compression', 'zlib')
2028 compengine = ui.config('experimental', 'format.compression', 'zlib')
2030 if compengine not in util.compengines:
2029 if compengine not in util.compengines:
2031 raise error.Abort(_('compression engine %s defined by '
2030 raise error.Abort(_('compression engine %s defined by '
2032 'experimental.format.compression not available') %
2031 'experimental.format.compression not available') %
2033 compengine,
2032 compengine,
2034 hint=_('run "hg debuginstall" to list available '
2033 hint=_('run "hg debuginstall" to list available '
2035 'compression engines'))
2034 'compression engines'))
2036
2035
2037 # zlib is the historical default and doesn't need an explicit requirement.
2036 # zlib is the historical default and doesn't need an explicit requirement.
2038 if compengine != 'zlib':
2037 if compengine != 'zlib':
2039 requirements.add('exp-compression-%s' % compengine)
2038 requirements.add('exp-compression-%s' % compengine)
2040
2039
2041 if scmutil.gdinitconfig(ui):
2040 if scmutil.gdinitconfig(ui):
2042 requirements.add('generaldelta')
2041 requirements.add('generaldelta')
2043 if ui.configbool('experimental', 'treemanifest', False):
2042 if ui.configbool('experimental', 'treemanifest', False):
2044 requirements.add('treemanifest')
2043 requirements.add('treemanifest')
2045 if ui.configbool('experimental', 'manifestv2', False):
2044 if ui.configbool('experimental', 'manifestv2', False):
2046 requirements.add('manifestv2')
2045 requirements.add('manifestv2')
2047
2046
2048 return requirements
2047 return requirements
@@ -1,891 +1,935 b''
1 commit hooks can see env vars
1 commit hooks can see env vars
2 (and post-transaction one are run unlocked)
2 (and post-transaction one are run unlocked)
3
3
4
4
5 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
5 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
6 > def showargs(ui, repo, hooktype, **kwargs):
6 > def showargs(ui, repo, hooktype, **kwargs):
7 > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
7 > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
8 > EOF
8 > EOF
9
9
10 $ hg init a
10 $ hg init a
11 $ cd a
11 $ cd a
12 $ cat > .hg/hgrc <<EOF
12 $ cat > .hg/hgrc <<EOF
13 > [hooks]
13 > [hooks]
14 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit"
14 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit"
15 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b"
15 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b"
16 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit"
16 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit"
17 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit"
17 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit"
18 > pretxncommit.tip = hg -q tip
18 > pretxncommit.tip = hg -q tip
19 > pre-identify = sh -c "printenv.py pre-identify 1"
19 > pre-identify = sh -c "printenv.py pre-identify 1"
20 > pre-cat = sh -c "printenv.py pre-cat"
20 > pre-cat = sh -c "printenv.py pre-cat"
21 > post-cat = sh -c "printenv.py post-cat"
21 > post-cat = sh -c "printenv.py post-cat"
22 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen"
22 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen"
23 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose"
23 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose"
24 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose"
24 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose"
25 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
25 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
26 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort"
26 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort"
27 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
27 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
28 > EOF
28 > EOF
29 $ echo a > a
29 $ echo a > a
30 $ hg add a
30 $ hg add a
31 $ hg commit -m a
31 $ hg commit -m a
32 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
32 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
33 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
33 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
34 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
34 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
35 0:cb9a9f314b8b
35 0:cb9a9f314b8b
36 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
36 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
37 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
37 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
38 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
38 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
39 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
39 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
40
40
41 $ hg clone . ../b
41 $ hg clone . ../b
42 updating to branch default
42 updating to branch default
43 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
43 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
44 $ cd ../b
44 $ cd ../b
45
45
46 changegroup hooks can see env vars
46 changegroup hooks can see env vars
47
47
48 $ cat > .hg/hgrc <<EOF
48 $ cat > .hg/hgrc <<EOF
49 > [hooks]
49 > [hooks]
50 > prechangegroup = sh -c "printenv.py prechangegroup"
50 > prechangegroup = sh -c "printenv.py prechangegroup"
51 > changegroup = sh -c "printenv.py changegroup"
51 > changegroup = sh -c "printenv.py changegroup"
52 > incoming = sh -c "printenv.py incoming"
52 > incoming = sh -c "printenv.py incoming"
53 > EOF
53 > EOF
54
54
55 pretxncommit and commit hooks can see both parents of merge
55 pretxncommit and commit hooks can see both parents of merge
56
56
57 $ cd ../a
57 $ cd ../a
58 $ echo b >> a
58 $ echo b >> a
59 $ hg commit -m a1 -d "1 0"
59 $ hg commit -m a1 -d "1 0"
60 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
60 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
61 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
61 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
62 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
62 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
63 1:ab228980c14d
63 1:ab228980c14d
64 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
64 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
65 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
65 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
66 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
66 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
67 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
67 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
68 $ hg update -C 0
68 $ hg update -C 0
69 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
69 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
70 $ echo b > b
70 $ echo b > b
71 $ hg add b
71 $ hg add b
72 $ hg commit -m b -d '1 0'
72 $ hg commit -m b -d '1 0'
73 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
73 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
74 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
74 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
75 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
75 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
76 2:ee9deb46ab31
76 2:ee9deb46ab31
77 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
77 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
78 created new head
78 created new head
79 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
79 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
80 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
80 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
81 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
81 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
82 $ hg merge 1
82 $ hg merge 1
83 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
83 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
84 (branch merge, don't forget to commit)
84 (branch merge, don't forget to commit)
85 $ hg commit -m merge -d '2 0'
85 $ hg commit -m merge -d '2 0'
86 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
86 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
87 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
87 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
88 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
88 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
89 3:07f3376c1e65
89 3:07f3376c1e65
90 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
90 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
91 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
91 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
92 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
92 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
93 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
93 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
94
94
95 test generic hooks
95 test generic hooks
96
96
97 $ hg id
97 $ hg id
98 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
98 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
99 abort: pre-identify hook exited with status 1
99 abort: pre-identify hook exited with status 1
100 [255]
100 [255]
101 $ hg cat b
101 $ hg cat b
102 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
102 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
103 b
103 b
104 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
104 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
105
105
106 $ cd ../b
106 $ cd ../b
107 $ hg pull ../a
107 $ hg pull ../a
108 pulling from ../a
108 pulling from ../a
109 searching for changes
109 searching for changes
110 prechangegroup hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
110 prechangegroup hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
111 adding changesets
111 adding changesets
112 adding manifests
112 adding manifests
113 adding file changes
113 adding file changes
114 added 3 changesets with 2 changes to 2 files
114 added 3 changesets with 2 changes to 2 files
115 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
115 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
116 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
116 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
117 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
117 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
118 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
118 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
119 (run 'hg update' to get a working copy)
119 (run 'hg update' to get a working copy)
120
120
121 tag hooks can see env vars
121 tag hooks can see env vars
122
122
123 $ cd ../a
123 $ cd ../a
124 $ cat >> .hg/hgrc <<EOF
124 $ cat >> .hg/hgrc <<EOF
125 > pretag = sh -c "printenv.py pretag"
125 > pretag = sh -c "printenv.py pretag"
126 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag"
126 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag"
127 > EOF
127 > EOF
128 $ hg tag -d '3 0' a
128 $ hg tag -d '3 0' a
129 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
129 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
130 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
130 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
131 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
131 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
132 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
132 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
133 4:539e4b31b6dc
133 4:539e4b31b6dc
134 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
134 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
135 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
135 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
136 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
136 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
137 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
137 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
138 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
138 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
139 $ hg tag -l la
139 $ hg tag -l la
140 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
140 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
141 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
141 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
142
142
143 pretag hook can forbid tagging
143 pretag hook can forbid tagging
144
144
145 $ cat >> .hg/hgrc <<EOF
145 $ cat >> .hg/hgrc <<EOF
146 > pretag.forbid = sh -c "printenv.py pretag.forbid 1"
146 > pretag.forbid = sh -c "printenv.py pretag.forbid 1"
147 > EOF
147 > EOF
148 $ hg tag -d '4 0' fa
148 $ hg tag -d '4 0' fa
149 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
149 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
150 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
150 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
151 abort: pretag.forbid hook exited with status 1
151 abort: pretag.forbid hook exited with status 1
152 [255]
152 [255]
153 $ hg tag -l fla
153 $ hg tag -l fla
154 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
154 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
155 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
155 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
156 abort: pretag.forbid hook exited with status 1
156 abort: pretag.forbid hook exited with status 1
157 [255]
157 [255]
158
158
159 pretxncommit hook can see changeset, can roll back txn, changeset no
159 pretxncommit hook can see changeset, can roll back txn, changeset no
160 more there after
160 more there after
161
161
162 $ cat >> .hg/hgrc <<EOF
162 $ cat >> .hg/hgrc <<EOF
163 > pretxncommit.forbid0 = sh -c "hg tip -q"
163 > pretxncommit.forbid0 = sh -c "hg tip -q"
164 > pretxncommit.forbid1 = sh -c "printenv.py pretxncommit.forbid 1"
164 > pretxncommit.forbid1 = sh -c "printenv.py pretxncommit.forbid 1"
165 > EOF
165 > EOF
166 $ echo z > z
166 $ echo z > z
167 $ hg add z
167 $ hg add z
168 $ hg -q tip
168 $ hg -q tip
169 4:539e4b31b6dc
169 4:539e4b31b6dc
170 $ hg commit -m 'fail' -d '4 0'
170 $ hg commit -m 'fail' -d '4 0'
171 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
171 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
172 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
172 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
173 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
173 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
174 5:6f611f8018c1
174 5:6f611f8018c1
175 5:6f611f8018c1
175 5:6f611f8018c1
176 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
176 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
177 transaction abort!
177 transaction abort!
178 txnabort python hook: txnid,txnname
178 txnabort python hook: txnid,txnname
179 txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
179 txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
180 rollback completed
180 rollback completed
181 abort: pretxncommit.forbid1 hook exited with status 1
181 abort: pretxncommit.forbid1 hook exited with status 1
182 [255]
182 [255]
183 $ hg -q tip
183 $ hg -q tip
184 4:539e4b31b6dc
184 4:539e4b31b6dc
185
185
186 (Check that no 'changelog.i.a' file were left behind)
186 (Check that no 'changelog.i.a' file were left behind)
187
187
188 $ ls -1 .hg/store/
188 $ ls -1 .hg/store/
189 00changelog.i
189 00changelog.i
190 00manifest.i
190 00manifest.i
191 data
191 data
192 fncache
192 fncache
193 journal.phaseroots
193 journal.phaseroots
194 phaseroots
194 phaseroots
195 undo
195 undo
196 undo.backup.fncache
196 undo.backup.fncache
197 undo.backupfiles
197 undo.backupfiles
198 undo.phaseroots
198 undo.phaseroots
199
199
200
200
201 precommit hook can prevent commit
201 precommit hook can prevent commit
202
202
203 $ cat >> .hg/hgrc <<EOF
203 $ cat >> .hg/hgrc <<EOF
204 > precommit.forbid = sh -c "printenv.py precommit.forbid 1"
204 > precommit.forbid = sh -c "printenv.py precommit.forbid 1"
205 > EOF
205 > EOF
206 $ hg commit -m 'fail' -d '4 0'
206 $ hg commit -m 'fail' -d '4 0'
207 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
207 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
208 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
208 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
209 abort: precommit.forbid hook exited with status 1
209 abort: precommit.forbid hook exited with status 1
210 [255]
210 [255]
211 $ hg -q tip
211 $ hg -q tip
212 4:539e4b31b6dc
212 4:539e4b31b6dc
213
213
214 preupdate hook can prevent update
214 preupdate hook can prevent update
215
215
216 $ cat >> .hg/hgrc <<EOF
216 $ cat >> .hg/hgrc <<EOF
217 > preupdate = sh -c "printenv.py preupdate"
217 > preupdate = sh -c "printenv.py preupdate"
218 > EOF
218 > EOF
219 $ hg update 1
219 $ hg update 1
220 preupdate hook: HG_PARENT1=ab228980c14d
220 preupdate hook: HG_PARENT1=ab228980c14d
221 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
221 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
222
222
223 update hook
223 update hook
224
224
225 $ cat >> .hg/hgrc <<EOF
225 $ cat >> .hg/hgrc <<EOF
226 > update = sh -c "printenv.py update"
226 > update = sh -c "printenv.py update"
227 > EOF
227 > EOF
228 $ hg update
228 $ hg update
229 preupdate hook: HG_PARENT1=539e4b31b6dc
229 preupdate hook: HG_PARENT1=539e4b31b6dc
230 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
230 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
231 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
231 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
232
232
233 pushkey hook
233 pushkey hook
234
234
235 $ cat >> .hg/hgrc <<EOF
235 $ cat >> .hg/hgrc <<EOF
236 > pushkey = sh -c "printenv.py pushkey"
236 > pushkey = sh -c "printenv.py pushkey"
237 > EOF
237 > EOF
238 $ cd ../b
238 $ cd ../b
239 $ hg bookmark -r null foo
239 $ hg bookmark -r null foo
240 $ hg push -B foo ../a
240 $ hg push -B foo ../a
241 pushing to ../a
241 pushing to ../a
242 searching for changes
242 searching for changes
243 no changes found
243 no changes found
244 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
244 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
245 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/a (glob)
245 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/a (glob)
246 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
246 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
247 txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/a (glob)
247 txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/a (glob)
248 exporting bookmark foo
248 exporting bookmark foo
249 [1]
249 [1]
250 $ cd ../a
250 $ cd ../a
251
251
252 listkeys hook
252 listkeys hook
253
253
254 $ cat >> .hg/hgrc <<EOF
254 $ cat >> .hg/hgrc <<EOF
255 > listkeys = sh -c "printenv.py listkeys"
255 > listkeys = sh -c "printenv.py listkeys"
256 > EOF
256 > EOF
257 $ hg bookmark -r null bar
257 $ hg bookmark -r null bar
258 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
258 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
259 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
259 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
260 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
260 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
261 $ cd ../b
261 $ cd ../b
262 $ hg pull -B bar ../a
262 $ hg pull -B bar ../a
263 pulling from ../a
263 pulling from ../a
264 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
264 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
265 no changes found
265 no changes found
266 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
266 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
267 adding remote bookmark bar
267 adding remote bookmark bar
268 $ cd ../a
268 $ cd ../a
269
269
270 test that prepushkey can prevent incoming keys
270 test that prepushkey can prevent incoming keys
271
271
272 $ cat >> .hg/hgrc <<EOF
272 $ cat >> .hg/hgrc <<EOF
273 > prepushkey = sh -c "printenv.py prepushkey.forbid 1"
273 > prepushkey = sh -c "printenv.py prepushkey.forbid 1"
274 > EOF
274 > EOF
275 $ cd ../b
275 $ cd ../b
276 $ hg bookmark -r null baz
276 $ hg bookmark -r null baz
277 $ hg push -B baz ../a
277 $ hg push -B baz ../a
278 pushing to ../a
278 pushing to ../a
279 searching for changes
279 searching for changes
280 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
280 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
281 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
281 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
282 no changes found
282 no changes found
283 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
283 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
284 prepushkey.forbid hook: HG_BUNDLE2=1 HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_SOURCE=push HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
284 prepushkey.forbid hook: HG_BUNDLE2=1 HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_SOURCE=push HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
285 pushkey-abort: prepushkey hook exited with status 1
285 pushkey-abort: prepushkey hook exited with status 1
286 abort: exporting bookmark baz failed!
286 abort: exporting bookmark baz failed!
287 [255]
287 [255]
288 $ cd ../a
288 $ cd ../a
289
289
290 test that prelistkeys can prevent listing keys
290 test that prelistkeys can prevent listing keys
291
291
292 $ cat >> .hg/hgrc <<EOF
292 $ cat >> .hg/hgrc <<EOF
293 > prelistkeys = sh -c "printenv.py prelistkeys.forbid 1"
293 > prelistkeys = sh -c "printenv.py prelistkeys.forbid 1"
294 > EOF
294 > EOF
295 $ hg bookmark -r null quux
295 $ hg bookmark -r null quux
296 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
296 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
297 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
297 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
298 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
298 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
299 $ cd ../b
299 $ cd ../b
300 $ hg pull -B quux ../a
300 $ hg pull -B quux ../a
301 pulling from ../a
301 pulling from ../a
302 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
302 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
303 abort: prelistkeys hook exited with status 1
303 abort: prelistkeys hook exited with status 1
304 [255]
304 [255]
305 $ cd ../a
305 $ cd ../a
306 $ rm .hg/hgrc
306 $ rm .hg/hgrc
307
307
308 prechangegroup hook can prevent incoming changes
308 prechangegroup hook can prevent incoming changes
309
309
310 $ cd ../b
310 $ cd ../b
311 $ hg -q tip
311 $ hg -q tip
312 3:07f3376c1e65
312 3:07f3376c1e65
313 $ cat > .hg/hgrc <<EOF
313 $ cat > .hg/hgrc <<EOF
314 > [hooks]
314 > [hooks]
315 > prechangegroup.forbid = sh -c "printenv.py prechangegroup.forbid 1"
315 > prechangegroup.forbid = sh -c "printenv.py prechangegroup.forbid 1"
316 > EOF
316 > EOF
317 $ hg pull ../a
317 $ hg pull ../a
318 pulling from ../a
318 pulling from ../a
319 searching for changes
319 searching for changes
320 prechangegroup.forbid hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
320 prechangegroup.forbid hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
321 abort: prechangegroup.forbid hook exited with status 1
321 abort: prechangegroup.forbid hook exited with status 1
322 [255]
322 [255]
323
323
324 pretxnchangegroup hook can see incoming changes, can roll back txn,
324 pretxnchangegroup hook can see incoming changes, can roll back txn,
325 incoming changes no longer there after
325 incoming changes no longer there after
326
326
327 $ cat > .hg/hgrc <<EOF
327 $ cat > .hg/hgrc <<EOF
328 > [hooks]
328 > [hooks]
329 > pretxnchangegroup.forbid0 = hg tip -q
329 > pretxnchangegroup.forbid0 = hg tip -q
330 > pretxnchangegroup.forbid1 = sh -c "printenv.py pretxnchangegroup.forbid 1"
330 > pretxnchangegroup.forbid1 = sh -c "printenv.py pretxnchangegroup.forbid 1"
331 > EOF
331 > EOF
332 $ hg pull ../a
332 $ hg pull ../a
333 pulling from ../a
333 pulling from ../a
334 searching for changes
334 searching for changes
335 adding changesets
335 adding changesets
336 adding manifests
336 adding manifests
337 adding file changes
337 adding file changes
338 added 1 changesets with 1 changes to 1 files
338 added 1 changesets with 1 changes to 1 files
339 4:539e4b31b6dc
339 4:539e4b31b6dc
340 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
340 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
341 transaction abort!
341 transaction abort!
342 rollback completed
342 rollback completed
343 abort: pretxnchangegroup.forbid1 hook exited with status 1
343 abort: pretxnchangegroup.forbid1 hook exited with status 1
344 [255]
344 [255]
345 $ hg -q tip
345 $ hg -q tip
346 3:07f3376c1e65
346 3:07f3376c1e65
347
347
348 outgoing hooks can see env vars
348 outgoing hooks can see env vars
349
349
350 $ rm .hg/hgrc
350 $ rm .hg/hgrc
351 $ cat > ../a/.hg/hgrc <<EOF
351 $ cat > ../a/.hg/hgrc <<EOF
352 > [hooks]
352 > [hooks]
353 > preoutgoing = sh -c "printenv.py preoutgoing"
353 > preoutgoing = sh -c "printenv.py preoutgoing"
354 > outgoing = sh -c "printenv.py outgoing"
354 > outgoing = sh -c "printenv.py outgoing"
355 > EOF
355 > EOF
356 $ hg pull ../a
356 $ hg pull ../a
357 pulling from ../a
357 pulling from ../a
358 searching for changes
358 searching for changes
359 preoutgoing hook: HG_SOURCE=pull
359 preoutgoing hook: HG_SOURCE=pull
360 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
360 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
361 adding changesets
361 adding changesets
362 adding manifests
362 adding manifests
363 adding file changes
363 adding file changes
364 added 1 changesets with 1 changes to 1 files
364 added 1 changesets with 1 changes to 1 files
365 adding remote bookmark quux
365 adding remote bookmark quux
366 (run 'hg update' to get a working copy)
366 (run 'hg update' to get a working copy)
367 $ hg rollback
367 $ hg rollback
368 repository tip rolled back to revision 3 (undo pull)
368 repository tip rolled back to revision 3 (undo pull)
369
369
370 preoutgoing hook can prevent outgoing changes
370 preoutgoing hook can prevent outgoing changes
371
371
372 $ cat >> ../a/.hg/hgrc <<EOF
372 $ cat >> ../a/.hg/hgrc <<EOF
373 > preoutgoing.forbid = sh -c "printenv.py preoutgoing.forbid 1"
373 > preoutgoing.forbid = sh -c "printenv.py preoutgoing.forbid 1"
374 > EOF
374 > EOF
375 $ hg pull ../a
375 $ hg pull ../a
376 pulling from ../a
376 pulling from ../a
377 searching for changes
377 searching for changes
378 preoutgoing hook: HG_SOURCE=pull
378 preoutgoing hook: HG_SOURCE=pull
379 preoutgoing.forbid hook: HG_SOURCE=pull
379 preoutgoing.forbid hook: HG_SOURCE=pull
380 abort: preoutgoing.forbid hook exited with status 1
380 abort: preoutgoing.forbid hook exited with status 1
381 [255]
381 [255]
382
382
383 outgoing hooks work for local clones
383 outgoing hooks work for local clones
384
384
385 $ cd ..
385 $ cd ..
386 $ cat > a/.hg/hgrc <<EOF
386 $ cat > a/.hg/hgrc <<EOF
387 > [hooks]
387 > [hooks]
388 > preoutgoing = sh -c "printenv.py preoutgoing"
388 > preoutgoing = sh -c "printenv.py preoutgoing"
389 > outgoing = sh -c "printenv.py outgoing"
389 > outgoing = sh -c "printenv.py outgoing"
390 > EOF
390 > EOF
391 $ hg clone a c
391 $ hg clone a c
392 preoutgoing hook: HG_SOURCE=clone
392 preoutgoing hook: HG_SOURCE=clone
393 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
393 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
394 updating to branch default
394 updating to branch default
395 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
395 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
396 $ rm -rf c
396 $ rm -rf c
397
397
398 preoutgoing hook can prevent outgoing changes for local clones
398 preoutgoing hook can prevent outgoing changes for local clones
399
399
400 $ cat >> a/.hg/hgrc <<EOF
400 $ cat >> a/.hg/hgrc <<EOF
401 > preoutgoing.forbid = sh -c "printenv.py preoutgoing.forbid 1"
401 > preoutgoing.forbid = sh -c "printenv.py preoutgoing.forbid 1"
402 > EOF
402 > EOF
403 $ hg clone a zzz
403 $ hg clone a zzz
404 preoutgoing hook: HG_SOURCE=clone
404 preoutgoing hook: HG_SOURCE=clone
405 preoutgoing.forbid hook: HG_SOURCE=clone
405 preoutgoing.forbid hook: HG_SOURCE=clone
406 abort: preoutgoing.forbid hook exited with status 1
406 abort: preoutgoing.forbid hook exited with status 1
407 [255]
407 [255]
408
408
409 $ cd "$TESTTMP/b"
409 $ cd "$TESTTMP/b"
410
410
411 $ cat > hooktests.py <<EOF
411 $ cat > hooktests.py <<EOF
412 > from mercurial import error
412 > from mercurial import error
413 >
413 >
414 > uncallable = 0
414 > uncallable = 0
415 >
415 >
416 > def printargs(args):
416 > def printargs(args):
417 > args.pop('ui', None)
417 > args.pop('ui', None)
418 > args.pop('repo', None)
418 > args.pop('repo', None)
419 > a = list(args.items())
419 > a = list(args.items())
420 > a.sort()
420 > a.sort()
421 > print 'hook args:'
421 > print 'hook args:'
422 > for k, v in a:
422 > for k, v in a:
423 > print ' ', k, v
423 > print ' ', k, v
424 >
424 >
425 > def passhook(**args):
425 > def passhook(**args):
426 > printargs(args)
426 > printargs(args)
427 >
427 >
428 > def failhook(**args):
428 > def failhook(**args):
429 > printargs(args)
429 > printargs(args)
430 > return True
430 > return True
431 >
431 >
432 > class LocalException(Exception):
432 > class LocalException(Exception):
433 > pass
433 > pass
434 >
434 >
435 > def raisehook(**args):
435 > def raisehook(**args):
436 > raise LocalException('exception from hook')
436 > raise LocalException('exception from hook')
437 >
437 >
438 > def aborthook(**args):
438 > def aborthook(**args):
439 > raise error.Abort('raise abort from hook')
439 > raise error.Abort('raise abort from hook')
440 >
440 >
441 > def brokenhook(**args):
441 > def brokenhook(**args):
442 > return 1 + {}
442 > return 1 + {}
443 >
443 >
444 > def verbosehook(ui, **args):
444 > def verbosehook(ui, **args):
445 > ui.note('verbose output from hook\n')
445 > ui.note('verbose output from hook\n')
446 >
446 >
447 > def printtags(ui, repo, **args):
447 > def printtags(ui, repo, **args):
448 > print sorted(repo.tags())
448 > print sorted(repo.tags())
449 >
449 >
450 > class container:
450 > class container:
451 > unreachable = 1
451 > unreachable = 1
452 > EOF
452 > EOF
453
453
454 $ cat > syntaxerror.py << EOF
454 $ cat > syntaxerror.py << EOF
455 > (foo
455 > (foo
456 > EOF
456 > EOF
457
457
458 test python hooks
458 test python hooks
459
459
460 #if windows
460 #if windows
461 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
461 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
462 #else
462 #else
463 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
463 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
464 #endif
464 #endif
465 $ export PYTHONPATH
465 $ export PYTHONPATH
466
466
467 $ echo '[hooks]' > ../a/.hg/hgrc
467 $ echo '[hooks]' > ../a/.hg/hgrc
468 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
468 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
469 $ hg pull ../a 2>&1 | grep 'raised an exception'
469 $ hg pull ../a 2>&1 | grep 'raised an exception'
470 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
470 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
471
471
472 $ echo '[hooks]' > ../a/.hg/hgrc
472 $ echo '[hooks]' > ../a/.hg/hgrc
473 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
473 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
474 $ hg pull ../a 2>&1 | grep 'raised an exception'
474 $ hg pull ../a 2>&1 | grep 'raised an exception'
475 error: preoutgoing.raise hook raised an exception: exception from hook
475 error: preoutgoing.raise hook raised an exception: exception from hook
476
476
477 $ echo '[hooks]' > ../a/.hg/hgrc
477 $ echo '[hooks]' > ../a/.hg/hgrc
478 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
478 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
479 $ hg pull ../a
479 $ hg pull ../a
480 pulling from ../a
480 pulling from ../a
481 searching for changes
481 searching for changes
482 error: preoutgoing.abort hook failed: raise abort from hook
482 error: preoutgoing.abort hook failed: raise abort from hook
483 abort: raise abort from hook
483 abort: raise abort from hook
484 [255]
484 [255]
485
485
486 $ echo '[hooks]' > ../a/.hg/hgrc
486 $ echo '[hooks]' > ../a/.hg/hgrc
487 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
487 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
488 $ hg pull ../a
488 $ hg pull ../a
489 pulling from ../a
489 pulling from ../a
490 searching for changes
490 searching for changes
491 hook args:
491 hook args:
492 hooktype preoutgoing
492 hooktype preoutgoing
493 source pull
493 source pull
494 abort: preoutgoing.fail hook failed
494 abort: preoutgoing.fail hook failed
495 [255]
495 [255]
496
496
497 $ echo '[hooks]' > ../a/.hg/hgrc
497 $ echo '[hooks]' > ../a/.hg/hgrc
498 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
498 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
499 $ hg pull ../a
499 $ hg pull ../a
500 pulling from ../a
500 pulling from ../a
501 searching for changes
501 searching for changes
502 abort: preoutgoing.uncallable hook is invalid: "hooktests.uncallable" is not callable
502 abort: preoutgoing.uncallable hook is invalid: "hooktests.uncallable" is not callable
503 [255]
503 [255]
504
504
505 $ echo '[hooks]' > ../a/.hg/hgrc
505 $ echo '[hooks]' > ../a/.hg/hgrc
506 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
506 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
507 $ hg pull ../a
507 $ hg pull ../a
508 pulling from ../a
508 pulling from ../a
509 searching for changes
509 searching for changes
510 abort: preoutgoing.nohook hook is invalid: "hooktests.nohook" is not defined
510 abort: preoutgoing.nohook hook is invalid: "hooktests.nohook" is not defined
511 [255]
511 [255]
512
512
513 $ echo '[hooks]' > ../a/.hg/hgrc
513 $ echo '[hooks]' > ../a/.hg/hgrc
514 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
514 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
515 $ hg pull ../a
515 $ hg pull ../a
516 pulling from ../a
516 pulling from ../a
517 searching for changes
517 searching for changes
518 abort: preoutgoing.nomodule hook is invalid: "nomodule" not in a module
518 abort: preoutgoing.nomodule hook is invalid: "nomodule" not in a module
519 [255]
519 [255]
520
520
521 $ echo '[hooks]' > ../a/.hg/hgrc
521 $ echo '[hooks]' > ../a/.hg/hgrc
522 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
522 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
523 $ hg pull ../a
523 $ hg pull ../a
524 pulling from ../a
524 pulling from ../a
525 searching for changes
525 searching for changes
526 abort: preoutgoing.badmodule hook is invalid: import of "nomodule" failed
526 abort: preoutgoing.badmodule hook is invalid: import of "nomodule" failed
527 (run with --traceback for stack trace)
527 (run with --traceback for stack trace)
528 [255]
528 [255]
529
529
530 $ echo '[hooks]' > ../a/.hg/hgrc
530 $ echo '[hooks]' > ../a/.hg/hgrc
531 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
531 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
532 $ hg pull ../a
532 $ hg pull ../a
533 pulling from ../a
533 pulling from ../a
534 searching for changes
534 searching for changes
535 abort: preoutgoing.unreachable hook is invalid: import of "hooktests.container" failed
535 abort: preoutgoing.unreachable hook is invalid: import of "hooktests.container" failed
536 (run with --traceback for stack trace)
536 (run with --traceback for stack trace)
537 [255]
537 [255]
538
538
539 $ echo '[hooks]' > ../a/.hg/hgrc
539 $ echo '[hooks]' > ../a/.hg/hgrc
540 $ echo 'preoutgoing.syntaxerror = python:syntaxerror.syntaxerror' >> ../a/.hg/hgrc
540 $ echo 'preoutgoing.syntaxerror = python:syntaxerror.syntaxerror' >> ../a/.hg/hgrc
541 $ hg pull ../a
541 $ hg pull ../a
542 pulling from ../a
542 pulling from ../a
543 searching for changes
543 searching for changes
544 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
544 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
545 (run with --traceback for stack trace)
545 (run with --traceback for stack trace)
546 [255]
546 [255]
547
547
548 The second egrep is to filter out lines like ' ^', which are slightly
548 The second egrep is to filter out lines like ' ^', which are slightly
549 different between Python 2.6 and Python 2.7.
549 different between Python 2.6 and Python 2.7.
550 $ hg pull ../a --traceback 2>&1 | egrep -v '^( +File| [_a-zA-Z*(])' | egrep -v '^( )+(\^)?$'
550 $ hg pull ../a --traceback 2>&1 | egrep -v '^( +File| [_a-zA-Z*(])' | egrep -v '^( )+(\^)?$'
551 pulling from ../a
551 pulling from ../a
552 searching for changes
552 searching for changes
553 exception from first failed import attempt:
553 exception from first failed import attempt:
554 Traceback (most recent call last):
554 Traceback (most recent call last):
555 SyntaxError: * (glob)
555 SyntaxError: * (glob)
556 exception from second failed import attempt:
556 exception from second failed import attempt:
557 Traceback (most recent call last):
557 Traceback (most recent call last):
558 ImportError: No module named hgext_syntaxerror
558 ImportError: No module named hgext_syntaxerror
559 Traceback (most recent call last):
559 Traceback (most recent call last):
560 HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
560 HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
561 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
561 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
562
562
563 $ echo '[hooks]' > ../a/.hg/hgrc
563 $ echo '[hooks]' > ../a/.hg/hgrc
564 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
564 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
565 $ hg pull ../a
565 $ hg pull ../a
566 pulling from ../a
566 pulling from ../a
567 searching for changes
567 searching for changes
568 hook args:
568 hook args:
569 hooktype preoutgoing
569 hooktype preoutgoing
570 source pull
570 source pull
571 adding changesets
571 adding changesets
572 adding manifests
572 adding manifests
573 adding file changes
573 adding file changes
574 added 1 changesets with 1 changes to 1 files
574 added 1 changesets with 1 changes to 1 files
575 adding remote bookmark quux
575 adding remote bookmark quux
576 (run 'hg update' to get a working copy)
576 (run 'hg update' to get a working copy)
577
577
578 post- python hooks that fail to *run* don't cause an abort
578 post- python hooks that fail to *run* don't cause an abort
579 $ rm ../a/.hg/hgrc
579 $ rm ../a/.hg/hgrc
580 $ echo '[hooks]' > .hg/hgrc
580 $ echo '[hooks]' > .hg/hgrc
581 $ echo 'post-pull.broken = python:hooktests.brokenhook' >> .hg/hgrc
581 $ echo 'post-pull.broken = python:hooktests.brokenhook' >> .hg/hgrc
582 $ hg pull ../a
582 $ hg pull ../a
583 pulling from ../a
583 pulling from ../a
584 searching for changes
584 searching for changes
585 no changes found
585 no changes found
586 error: post-pull.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
586 error: post-pull.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
587 (run with --traceback for stack trace)
587 (run with --traceback for stack trace)
588
588
589 but post- python hooks that fail to *load* do
589 but post- python hooks that fail to *load* do
590 $ echo '[hooks]' > .hg/hgrc
590 $ echo '[hooks]' > .hg/hgrc
591 $ echo 'post-pull.nomodule = python:nomodule' >> .hg/hgrc
591 $ echo 'post-pull.nomodule = python:nomodule' >> .hg/hgrc
592 $ hg pull ../a
592 $ hg pull ../a
593 pulling from ../a
593 pulling from ../a
594 searching for changes
594 searching for changes
595 no changes found
595 no changes found
596 abort: post-pull.nomodule hook is invalid: "nomodule" not in a module
596 abort: post-pull.nomodule hook is invalid: "nomodule" not in a module
597 [255]
597 [255]
598
598
599 $ echo '[hooks]' > .hg/hgrc
599 $ echo '[hooks]' > .hg/hgrc
600 $ echo 'post-pull.badmodule = python:nomodule.nowhere' >> .hg/hgrc
600 $ echo 'post-pull.badmodule = python:nomodule.nowhere' >> .hg/hgrc
601 $ hg pull ../a
601 $ hg pull ../a
602 pulling from ../a
602 pulling from ../a
603 searching for changes
603 searching for changes
604 no changes found
604 no changes found
605 abort: post-pull.badmodule hook is invalid: import of "nomodule" failed
605 abort: post-pull.badmodule hook is invalid: import of "nomodule" failed
606 (run with --traceback for stack trace)
606 (run with --traceback for stack trace)
607 [255]
607 [255]
608
608
609 $ echo '[hooks]' > .hg/hgrc
609 $ echo '[hooks]' > .hg/hgrc
610 $ echo 'post-pull.nohook = python:hooktests.nohook' >> .hg/hgrc
610 $ echo 'post-pull.nohook = python:hooktests.nohook' >> .hg/hgrc
611 $ hg pull ../a
611 $ hg pull ../a
612 pulling from ../a
612 pulling from ../a
613 searching for changes
613 searching for changes
614 no changes found
614 no changes found
615 abort: post-pull.nohook hook is invalid: "hooktests.nohook" is not defined
615 abort: post-pull.nohook hook is invalid: "hooktests.nohook" is not defined
616 [255]
616 [255]
617
617
618 make sure --traceback works
618 make sure --traceback works
619
619
620 $ echo '[hooks]' > .hg/hgrc
620 $ echo '[hooks]' > .hg/hgrc
621 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
621 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
622
622
623 $ echo aa > a
623 $ echo aa > a
624 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
624 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
625 Traceback (most recent call last):
625 Traceback (most recent call last):
626
626
627 $ cd ..
627 $ cd ..
628 $ hg init c
628 $ hg init c
629 $ cd c
629 $ cd c
630
630
631 $ cat > hookext.py <<EOF
631 $ cat > hookext.py <<EOF
632 > def autohook(**args):
632 > def autohook(**args):
633 > print "Automatically installed hook"
633 > print "Automatically installed hook"
634 >
634 >
635 > def reposetup(ui, repo):
635 > def reposetup(ui, repo):
636 > repo.ui.setconfig("hooks", "commit.auto", autohook)
636 > repo.ui.setconfig("hooks", "commit.auto", autohook)
637 > EOF
637 > EOF
638 $ echo '[extensions]' >> .hg/hgrc
638 $ echo '[extensions]' >> .hg/hgrc
639 $ echo 'hookext = hookext.py' >> .hg/hgrc
639 $ echo 'hookext = hookext.py' >> .hg/hgrc
640
640
641 $ touch foo
641 $ touch foo
642 $ hg add foo
642 $ hg add foo
643 $ hg ci -d '0 0' -m 'add foo'
643 $ hg ci -d '0 0' -m 'add foo'
644 Automatically installed hook
644 Automatically installed hook
645 $ echo >> foo
645 $ echo >> foo
646 $ hg ci --debug -d '0 0' -m 'change foo'
646 $ hg ci --debug -d '0 0' -m 'change foo'
647 committing files:
647 committing files:
648 foo
648 foo
649 committing manifest
649 committing manifest
650 committing changelog
650 committing changelog
651 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
651 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
652 calling hook commit.auto: hgext_hookext.autohook
652 calling hook commit.auto: hgext_hookext.autohook
653 Automatically installed hook
653 Automatically installed hook
654
654
655 $ hg showconfig hooks
655 $ hg showconfig hooks
656 hooks.commit.auto=<function autohook at *> (glob)
656 hooks.commit.auto=<function autohook at *> (glob)
657
657
658 test python hook configured with python:[file]:[hook] syntax
658 test python hook configured with python:[file]:[hook] syntax
659
659
660 $ cd ..
660 $ cd ..
661 $ mkdir d
661 $ mkdir d
662 $ cd d
662 $ cd d
663 $ hg init repo
663 $ hg init repo
664 $ mkdir hooks
664 $ mkdir hooks
665
665
666 $ cd hooks
666 $ cd hooks
667 $ cat > testhooks.py <<EOF
667 $ cat > testhooks.py <<EOF
668 > def testhook(**args):
668 > def testhook(**args):
669 > print 'hook works'
669 > print 'hook works'
670 > EOF
670 > EOF
671 $ echo '[hooks]' > ../repo/.hg/hgrc
671 $ echo '[hooks]' > ../repo/.hg/hgrc
672 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
672 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
673
673
674 $ cd ../repo
674 $ cd ../repo
675 $ hg commit -d '0 0'
675 $ hg commit -d '0 0'
676 hook works
676 hook works
677 nothing changed
677 nothing changed
678 [1]
678 [1]
679
679
680 $ echo '[hooks]' > .hg/hgrc
680 $ echo '[hooks]' > .hg/hgrc
681 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
681 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
682 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
682 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
683
683
684 $ hg up null
684 $ hg up null
685 loading update.ne hook failed:
685 loading update.ne hook failed:
686 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
686 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
687 [255]
687 [255]
688
688
689 $ hg id
689 $ hg id
690 loading pre-identify.npmd hook failed:
690 loading pre-identify.npmd hook failed:
691 abort: No module named repo!
691 abort: No module named repo!
692 [255]
692 [255]
693
693
694 $ cd ../../b
694 $ cd ../../b
695
695
696 make sure --traceback works on hook import failure
696 make sure --traceback works on hook import failure
697
697
698 $ cat > importfail.py <<EOF
698 $ cat > importfail.py <<EOF
699 > import somebogusmodule
699 > import somebogusmodule
700 > # dereference something in the module to force demandimport to load it
700 > # dereference something in the module to force demandimport to load it
701 > somebogusmodule.whatever
701 > somebogusmodule.whatever
702 > EOF
702 > EOF
703
703
704 $ echo '[hooks]' > .hg/hgrc
704 $ echo '[hooks]' > .hg/hgrc
705 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
705 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
706
706
707 $ echo a >> a
707 $ echo a >> a
708 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
708 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
709 exception from first failed import attempt:
709 exception from first failed import attempt:
710 Traceback (most recent call last):
710 Traceback (most recent call last):
711 ImportError: No module named somebogusmodule
711 ImportError: No module named somebogusmodule
712 exception from second failed import attempt:
712 exception from second failed import attempt:
713 Traceback (most recent call last):
713 Traceback (most recent call last):
714 ImportError: No module named hgext_importfail
714 ImportError: No module named hgext_importfail
715 Traceback (most recent call last):
715 Traceback (most recent call last):
716 HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
716 HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
717 abort: precommit.importfail hook is invalid: import of "importfail" failed
717 abort: precommit.importfail hook is invalid: import of "importfail" failed
718
718
719 Issue1827: Hooks Update & Commit not completely post operation
719 Issue1827: Hooks Update & Commit not completely post operation
720
720
721 commit and update hooks should run after command completion. The largefiles
721 commit and update hooks should run after command completion. The largefiles
722 use demonstrates a recursive wlock, showing the hook doesn't run until the
722 use demonstrates a recursive wlock, showing the hook doesn't run until the
723 final release (and dirstate flush).
723 final release (and dirstate flush).
724
724
725 $ echo '[hooks]' > .hg/hgrc
725 $ echo '[hooks]' > .hg/hgrc
726 $ echo 'commit = hg id' >> .hg/hgrc
726 $ echo 'commit = hg id' >> .hg/hgrc
727 $ echo 'update = hg id' >> .hg/hgrc
727 $ echo 'update = hg id' >> .hg/hgrc
728 $ echo bb > a
728 $ echo bb > a
729 $ hg ci -ma
729 $ hg ci -ma
730 223eafe2750c tip
730 223eafe2750c tip
731 $ hg up 0 --config extensions.largefiles=
731 $ hg up 0 --config extensions.largefiles=
732 cb9a9f314b8b
732 cb9a9f314b8b
733 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
733 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
734
734
735 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
735 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
736 that is passed to pre/post hooks
736 that is passed to pre/post hooks
737
737
738 $ echo '[hooks]' > .hg/hgrc
738 $ echo '[hooks]' > .hg/hgrc
739 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
739 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
740 $ hg id
740 $ hg id
741 cb9a9f314b8b
741 cb9a9f314b8b
742 $ hg id --verbose
742 $ hg id --verbose
743 calling hook pre-identify: hooktests.verbosehook
743 calling hook pre-identify: hooktests.verbosehook
744 verbose output from hook
744 verbose output from hook
745 cb9a9f314b8b
745 cb9a9f314b8b
746
746
747 Ensure hooks can be prioritized
747 Ensure hooks can be prioritized
748
748
749 $ echo '[hooks]' > .hg/hgrc
749 $ echo '[hooks]' > .hg/hgrc
750 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
750 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
751 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
751 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
752 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
752 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
753 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
753 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
754 $ hg id --verbose
754 $ hg id --verbose
755 calling hook pre-identify.b: hooktests.verbosehook
755 calling hook pre-identify.b: hooktests.verbosehook
756 verbose output from hook
756 verbose output from hook
757 calling hook pre-identify.a: hooktests.verbosehook
757 calling hook pre-identify.a: hooktests.verbosehook
758 verbose output from hook
758 verbose output from hook
759 calling hook pre-identify.c: hooktests.verbosehook
759 calling hook pre-identify.c: hooktests.verbosehook
760 verbose output from hook
760 verbose output from hook
761 cb9a9f314b8b
761 cb9a9f314b8b
762
762
763 new tags must be visible in pretxncommit (issue3210)
763 new tags must be visible in pretxncommit (issue3210)
764
764
765 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
765 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
766 $ hg tag -f foo
766 $ hg tag -f foo
767 ['a', 'foo', 'tip']
767 ['a', 'foo', 'tip']
768
768
769 post-init hooks must not crash (issue4983)
769 post-init hooks must not crash (issue4983)
770 This also creates the `to` repo for the next test block.
770 This also creates the `to` repo for the next test block.
771
771
772 $ cd ..
772 $ cd ..
773 $ cat << EOF >> hgrc-with-post-init-hook
773 $ cat << EOF >> hgrc-with-post-init-hook
774 > [hooks]
774 > [hooks]
775 > post-init = sh -c "printenv.py post-init"
775 > post-init = sh -c "printenv.py post-init"
776 > EOF
776 > EOF
777 $ HGRCPATH=hgrc-with-post-init-hook hg init to
777 $ HGRCPATH=hgrc-with-post-init-hook hg init to
778 post-init hook: HG_ARGS=init to HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} HG_PATS=['to'] HG_RESULT=0
778 post-init hook: HG_ARGS=init to HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} HG_PATS=['to'] HG_RESULT=0
779
779
780 new commits must be visible in pretxnchangegroup (issue3428)
780 new commits must be visible in pretxnchangegroup (issue3428)
781
781
782 $ echo '[hooks]' >> to/.hg/hgrc
782 $ echo '[hooks]' >> to/.hg/hgrc
783 $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc
783 $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc
784 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
784 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
785 $ echo a >> to/a
785 $ echo a >> to/a
786 $ hg --cwd to ci -Ama
786 $ hg --cwd to ci -Ama
787 adding a
787 adding a
788 $ hg clone to from
788 $ hg clone to from
789 updating to branch default
789 updating to branch default
790 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
790 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
791 $ echo aa >> from/a
791 $ echo aa >> from/a
792 $ hg --cwd from ci -mb
792 $ hg --cwd from ci -mb
793 $ hg --cwd from push
793 $ hg --cwd from push
794 pushing to $TESTTMP/to (glob)
794 pushing to $TESTTMP/to (glob)
795 searching for changes
795 searching for changes
796 changeset: 0:cb9a9f314b8b
796 changeset: 0:cb9a9f314b8b
797 tag: tip
797 tag: tip
798 user: test
798 user: test
799 date: Thu Jan 01 00:00:00 1970 +0000
799 date: Thu Jan 01 00:00:00 1970 +0000
800 summary: a
800 summary: a
801
801
802 adding changesets
802 adding changesets
803 adding manifests
803 adding manifests
804 adding file changes
804 adding file changes
805 added 1 changesets with 1 changes to 1 files
805 added 1 changesets with 1 changes to 1 files
806 changeset: 1:9836a07b9b9d
806 changeset: 1:9836a07b9b9d
807 tag: tip
807 tag: tip
808 user: test
808 user: test
809 date: Thu Jan 01 00:00:00 1970 +0000
809 date: Thu Jan 01 00:00:00 1970 +0000
810 summary: b
810 summary: b
811
811
812
812
813 pretxnclose hook failure should abort the transaction
813 pretxnclose hook failure should abort the transaction
814
814
815 $ hg init txnfailure
815 $ hg init txnfailure
816 $ cd txnfailure
816 $ cd txnfailure
817 $ touch a && hg commit -Aqm a
817 $ touch a && hg commit -Aqm a
818 $ cat >> .hg/hgrc <<EOF
818 $ cat >> .hg/hgrc <<EOF
819 > [hooks]
819 > [hooks]
820 > pretxnclose.error = exit 1
820 > pretxnclose.error = exit 1
821 > EOF
821 > EOF
822 $ hg strip -r 0 --config extensions.strip=
822 $ hg strip -r 0 --config extensions.strip=
823 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
823 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
824 saved backup bundle to * (glob)
824 saved backup bundle to * (glob)
825 transaction abort!
825 transaction abort!
826 rollback completed
826 rollback completed
827 strip failed, backup bundle stored in * (glob)
827 strip failed, backup bundle stored in * (glob)
828 abort: pretxnclose.error hook exited with status 1
828 abort: pretxnclose.error hook exited with status 1
829 [255]
829 [255]
830 $ hg recover
830 $ hg recover
831 no interrupted transaction available
831 no interrupted transaction available
832 [1]
832 [1]
833 $ cd ..
833 $ cd ..
834
834
835 check whether HG_PENDING makes pending changes visible to an external
836 hook only in the related repositories.
837
838 (emulate a transaction running concurrently by copying
839 .hg/store/00changelog.i.a in a subsequent test)
840
841 $ cat > $TESTTMP/savepending.sh <<EOF
842 > cp .hg/store/00changelog.i.a .hg/store/00changelog.i.a.saved
843 > exit 1 # to avoid adding new revision for subsequent tests
844 > EOF
845 $ cd a
846 $ hg tip -q
847 4:539e4b31b6dc
848 $ hg --config hooks.pretxnclose="sh $TESTTMP/savepending.sh" commit -m "invisible"
849 transaction abort!
850 rollback completed
851 abort: pretxnclose hook exited with status 1
852 [255]
853 $ cp .hg/store/00changelog.i.a.saved .hg/store/00changelog.i.a
854
855 (check the (in)visibility of the new changeset while a transaction is
856 running in the repo)
857
858 $ cat > $TESTTMP/checkpending.sh <<EOF
859 > echo '@a'
860 > hg -R $TESTTMP/a tip -q
861 > echo '@a/nested'
862 > hg -R $TESTTMP/a/nested tip -q
863 > exit 1 # to avoid adding new revision for subsequent tests
864 > EOF
865 $ hg init nested
866 $ cd nested
867 $ echo a > a
868 $ hg add a
869 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkpending.sh" commit -m '#0'
870 @a
871 4:539e4b31b6dc
872 @a/nested
873 0:bf5e395ced2c
874 transaction abort!
875 rollback completed
876 abort: pretxnclose hook exited with status 1
877 [255]
878
835 Hook from untrusted hgrc are reported as failure
879 Hook from untrusted hgrc are reported as failure
836 ================================================
880 ================================================
837
881
838 $ cat << EOF > $TESTTMP/untrusted.py
882 $ cat << EOF > $TESTTMP/untrusted.py
839 > from mercurial import scmutil, util
883 > from mercurial import scmutil, util
840 > def uisetup(ui):
884 > def uisetup(ui):
841 > class untrustedui(ui.__class__):
885 > class untrustedui(ui.__class__):
842 > def _trusted(self, fp, f):
886 > def _trusted(self, fp, f):
843 > if util.normpath(fp.name).endswith('untrusted/.hg/hgrc'):
887 > if util.normpath(fp.name).endswith('untrusted/.hg/hgrc'):
844 > return False
888 > return False
845 > return super(untrustedui, self)._trusted(fp, f)
889 > return super(untrustedui, self)._trusted(fp, f)
846 > ui.__class__ = untrustedui
890 > ui.__class__ = untrustedui
847 > EOF
891 > EOF
848 $ cat << EOF >> $HGRCPATH
892 $ cat << EOF >> $HGRCPATH
849 > [extensions]
893 > [extensions]
850 > untrusted=$TESTTMP/untrusted.py
894 > untrusted=$TESTTMP/untrusted.py
851 > EOF
895 > EOF
852 $ hg init untrusted
896 $ hg init untrusted
853 $ cd untrusted
897 $ cd untrusted
854
898
855 Non-blocking hook
899 Non-blocking hook
856 -----------------
900 -----------------
857
901
858 $ cat << EOF >> .hg/hgrc
902 $ cat << EOF >> .hg/hgrc
859 > [hooks]
903 > [hooks]
860 > txnclose.testing=echo txnclose hook called
904 > txnclose.testing=echo txnclose hook called
861 > EOF
905 > EOF
862 $ touch a && hg commit -Aqm a
906 $ touch a && hg commit -Aqm a
863 warning: untrusted hook txnclose not executed
907 warning: untrusted hook txnclose not executed
864 $ hg log
908 $ hg log
865 changeset: 0:3903775176ed
909 changeset: 0:3903775176ed
866 tag: tip
910 tag: tip
867 user: test
911 user: test
868 date: Thu Jan 01 00:00:00 1970 +0000
912 date: Thu Jan 01 00:00:00 1970 +0000
869 summary: a
913 summary: a
870
914
871
915
872 Non-blocking hook
916 Non-blocking hook
873 -----------------
917 -----------------
874
918
875 $ cat << EOF >> .hg/hgrc
919 $ cat << EOF >> .hg/hgrc
876 > [hooks]
920 > [hooks]
877 > pretxnclose.testing=echo pre-txnclose hook called
921 > pretxnclose.testing=echo pre-txnclose hook called
878 > EOF
922 > EOF
879 $ touch b && hg commit -Aqm a
923 $ touch b && hg commit -Aqm a
880 transaction abort!
924 transaction abort!
881 rollback completed
925 rollback completed
882 abort: untrusted hook pretxnclose not executed
926 abort: untrusted hook pretxnclose not executed
883 (see 'hg help config.trusted')
927 (see 'hg help config.trusted')
884 [255]
928 [255]
885 $ hg log
929 $ hg log
886 changeset: 0:3903775176ed
930 changeset: 0:3903775176ed
887 tag: tip
931 tag: tip
888 user: test
932 user: test
889 date: Thu Jan 01 00:00:00 1970 +0000
933 date: Thu Jan 01 00:00:00 1970 +0000
890 summary: a
934 summary: a
891
935
General Comments 0
You need to be logged in to leave comments. Login now