##// END OF EJS Templates
tag: run commit hook when lock is released (issue3344)
Mads Kiilerich -
r16680:d0e419b0 stable
parent child Browse files
Show More
@@ -1,2349 +1,2353
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import scmutil, util, extensions, hook, error, revset
13 import scmutil, util, extensions, hook, error, revset
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, errno, os, time, inspect
18 import weakref, errno, os, time, inspect
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20 filecache = scmutil.filecache
20 filecache = scmutil.filecache
21
21
class storecache(filecache):
    """A filecache variant for files that live in the store.

    The base filecache resolves names relative to .hg/; this subclass
    routes them through the repository's store join instead, so entries
    such as '00changelog.i' are tracked under .hg/store/.
    """
    def join(self, obj, fname):
        # obj is the repository owning the cached property
        return obj.sjoin(fname)
26
26
class localrepository(repo.repository):
    # protocol capabilities advertised by this repository class
    capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
                        'known', 'getbundle'))
    # requirements that change how revlog data is encoded on disk
    supportedformats = set(('revlogv1', 'generaldelta'))
    # full set of repository requirements this code knows how to open
    supported = supportedformats | set(('store', 'fncache', 'shared',
                                        'dotencode'))
33
33
    def __init__(self, baseui, path=None, create=False):
        """Open the repository at path, or initialize it when create=True.

        baseui is copied into self.ui so per-repository configuration
        (.hg/hgrc) does not leak back into the caller's ui.  Raises
        error.RepoError when the repository does not exist (or, with
        create=True, when it already exists).
        """
        repo.repository.__init__(self)
        self.root = os.path.realpath(util.expandpath(path))
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.auditor = scmutil.pathauditor(self.root, self._checknested)
        self.opener = scmutil.opener(self.path)    # opener rooted at .hg/
        self.wopener = scmutil.opener(self.root)   # opener rooted at the working dir
        self.baseui = baseui
        self.ui = baseui.copy()
        self._dirtyphases = False
        # A list of callbacks to shape the phase if no data were found.
        # Callbacks are in the form: func(repo, roots) --> processed root.
        # This list is to be filled by extensions during repo setup.
        self._phasedefaults = []

        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # a missing .hg/hgrc is fine; extensions load with defaults
            pass

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    util.makedirs(path)
                util.makedir(self.path, notindexed=True)
                requirements = ["revlogv1"]
                if self.ui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                    # create an invalid changelog
                    self.opener.append(
                        "00changelog.i",
                        '\0\0\0\2' # represents revlogv2
                        ' dummy changelog to prevent using the old repo layout'
                    )
                if self.ui.configbool('format', 'generaldelta', False):
                    requirements.append("generaldelta")
                requirements = set(requirements)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            try:
                requirements = scmutil.readrequires(self.opener, self.supported)
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # no requires file: treat as an old-style repository
                requirements = set()

        # honor a share (shared store) indirection when present
        self.sharedpath = self.path
        try:
            s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
            if not os.path.exists(s):
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, scmutil.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()


        self._branchcache = None
        self._branchcachetip = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}
122
122
123 def _applyrequirements(self, requirements):
123 def _applyrequirements(self, requirements):
124 self.requirements = requirements
124 self.requirements = requirements
125 openerreqs = set(('revlogv1', 'generaldelta'))
125 openerreqs = set(('revlogv1', 'generaldelta'))
126 self.sopener.options = dict((r, 1) for r in requirements
126 self.sopener.options = dict((r, 1) for r in requirements
127 if r in openerreqs)
127 if r in openerreqs)
128
128
129 def _writerequirements(self):
129 def _writerequirements(self):
130 reqfile = self.opener("requires", "w")
130 reqfile = self.opener("requires", "w")
131 for r in self.requirements:
131 for r in self.requirements:
132 reqfile.write("%s\n" % r)
132 reqfile.write("%s\n" % r)
133 reqfile.close()
133 reqfile.close()
134
134
    def _checknested(self, path):
        """Determine if path is a legal nested repository.

        path is an absolute filesystem path; returns True only when it
        names (or lies inside) a subrepository recorded in the working
        context's .hgsub state.
        """
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    # path is itself a registered subrepo: legal
                    return True
                else:
                    # path lies inside a subrepo; delegate the check to it
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                # drop the innermost component and retry with the parent
                parts.pop()
        return False
172
172
    @filecache('bookmarks')
    def _bookmarks(self):
        # bookmark-name -> node map, reloaded when .hg/bookmarks changes
        return bookmarks.read(self)
176
176
    @filecache('bookmarks.current')
    def _bookmarkcurrent(self):
        # the active bookmark, as reported by bookmarks.readcurrent
        return bookmarks.readcurrent(self)
180
180
    def _writebookmarks(self, marks):
        # NOTE(review): 'marks' is unused here; bookmarks.write reads the
        # repository's bookmark state directly.  Kept for API compatibility.
        bookmarks.write(self)
183
183
    @storecache('phaseroots')
    def _phaseroots(self):
        # data just (re)read from disk is by definition not dirty
        self._dirtyphases = False
        phaseroots = phases.readroots(self)
        # drop roots whose nodes are unknown to this repository
        phases.filterunknown(self, phaseroots)
        return phaseroots
190
190
191 @propertycache
191 @propertycache
192 def _phaserev(self):
192 def _phaserev(self):
193 cache = [phases.public] * len(self)
193 cache = [phases.public] * len(self)
194 for phase in phases.trackedphases:
194 for phase in phases.trackedphases:
195 roots = map(self.changelog.rev, self._phaseroots[phase])
195 roots = map(self.changelog.rev, self._phaseroots[phase])
196 if roots:
196 if roots:
197 for rev in roots:
197 for rev in roots:
198 cache[rev] = phase
198 cache[rev] = phase
199 for rev in self.changelog.descendants(*roots):
199 for rev in self.changelog.descendants(*roots):
200 cache[rev] = phase
200 cache[rev] = phase
201 return cache
201 return cache
202
202
    @storecache('00changelog.i')
    def changelog(self):
        # Load the changelog; pick up pending (in-transaction) data when
        # we are running inside a hook of this very repository, which the
        # caller signals via the HG_PENDING environment variable.
        c = changelog.changelog(self.sopener)
        if 'HG_PENDING' in os.environ:
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        return c
211
211
    @storecache('00manifest.i')
    def manifest(self):
        # manifest revlog, invalidated whenever 00manifest.i changes
        return manifest.manifest(self.sopener)
215
215
    @filecache('dirstate')
    def dirstate(self):
        """The working-directory state, with working-parent validation.

        An unknown working parent (e.g. after a strip) is silently
        replaced by nullid, warning the user only once.
        """
        warned = [0]  # mutable cell so the closure below can flip it
        def validate(node):
            try:
                self.changelog.rev(node)
                return node
            except error.LookupError:
                if not warned[0]:
                    warned[0] = True
                    self.ui.warn(_("warning: ignoring unknown"
                                   " working parent %s!\n") % short(node))
                return nullid

        return dirstate.dirstate(self.opener, self.ui, self.root, validate)
231
231
    def __getitem__(self, changeid):
        # repo[None] -> working directory context; anything else -> changectx
        if changeid is None:
            return context.workingctx(self)
        return context.changectx(self, changeid)
236
236
    def __contains__(self, changeid):
        # membership means "resolvable to a changeset"
        try:
            return bool(self.lookup(changeid))
        except error.RepoLookupError:
            return False
242
242
    def __nonzero__(self):
        # a repository object is always truthy, even when empty
        return True
245
245
    def __len__(self):
        # number of revisions known to the changelog
        return len(self.changelog)
248
248
249 def __iter__(self):
249 def __iter__(self):
250 for i in xrange(len(self)):
250 for i in xrange(len(self)):
251 yield i
251 yield i
252
252
253 def revs(self, expr, *args):
253 def revs(self, expr, *args):
254 '''Return a list of revisions matching the given revset'''
254 '''Return a list of revisions matching the given revset'''
255 expr = revset.formatspec(expr, *args)
255 expr = revset.formatspec(expr, *args)
256 m = revset.match(None, expr)
256 m = revset.match(None, expr)
257 return [r for r in m(self, range(len(self)))]
257 return [r for r in m(self, range(len(self)))]
258
258
    def set(self, expr, *args):
        '''
        Yield a context for each matching revision, after doing arg
        replacement via revset.formatspec
        '''
        for r in self.revs(expr, *args):
            yield self[r]
266
266
    def url(self):
        # local repositories are addressed with the file: scheme
        return 'file:' + self.root
269
269
    def hook(self, name, throw=False, **args):
        # run the configured hooks for 'name'; throw=True aborts on failure
        return hook.hook(self.ui, self, name, throw, **args)
272
272
    # characters that may never appear in a tag name
    tag_disallowed = ':\r\n'
274
274
275 def _tag(self, names, node, message, local, user, date, extra={}):
275 def _tag(self, names, node, message, local, user, date, extra={}):
276 if isinstance(names, str):
276 if isinstance(names, str):
277 allchars = names
277 allchars = names
278 names = (names,)
278 names = (names,)
279 else:
279 else:
280 allchars = ''.join(names)
280 allchars = ''.join(names)
281 for c in self.tag_disallowed:
281 for c in self.tag_disallowed:
282 if c in allchars:
282 if c in allchars:
283 raise util.Abort(_('%r cannot be used in a tag name') % c)
283 raise util.Abort(_('%r cannot be used in a tag name') % c)
284
284
285 branches = self.branchmap()
285 branches = self.branchmap()
286 for name in names:
286 for name in names:
287 self.hook('pretag', throw=True, node=hex(node), tag=name,
287 self.hook('pretag', throw=True, node=hex(node), tag=name,
288 local=local)
288 local=local)
289 if name in branches:
289 if name in branches:
290 self.ui.warn(_("warning: tag %s conflicts with existing"
290 self.ui.warn(_("warning: tag %s conflicts with existing"
291 " branch name\n") % name)
291 " branch name\n") % name)
292
292
293 def writetags(fp, names, munge, prevtags):
293 def writetags(fp, names, munge, prevtags):
294 fp.seek(0, 2)
294 fp.seek(0, 2)
295 if prevtags and prevtags[-1] != '\n':
295 if prevtags and prevtags[-1] != '\n':
296 fp.write('\n')
296 fp.write('\n')
297 for name in names:
297 for name in names:
298 m = munge and munge(name) or name
298 m = munge and munge(name) or name
299 if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
299 if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
300 old = self.tags().get(name, nullid)
300 old = self.tags().get(name, nullid)
301 fp.write('%s %s\n' % (hex(old), m))
301 fp.write('%s %s\n' % (hex(old), m))
302 fp.write('%s %s\n' % (hex(node), m))
302 fp.write('%s %s\n' % (hex(node), m))
303 fp.close()
303 fp.close()
304
304
305 prevtags = ''
305 prevtags = ''
306 if local:
306 if local:
307 try:
307 try:
308 fp = self.opener('localtags', 'r+')
308 fp = self.opener('localtags', 'r+')
309 except IOError:
309 except IOError:
310 fp = self.opener('localtags', 'a')
310 fp = self.opener('localtags', 'a')
311 else:
311 else:
312 prevtags = fp.read()
312 prevtags = fp.read()
313
313
314 # local tags are stored in the current charset
314 # local tags are stored in the current charset
315 writetags(fp, names, None, prevtags)
315 writetags(fp, names, None, prevtags)
316 for name in names:
316 for name in names:
317 self.hook('tag', node=hex(node), tag=name, local=local)
317 self.hook('tag', node=hex(node), tag=name, local=local)
318 return
318 return
319
319
320 try:
320 try:
321 fp = self.wfile('.hgtags', 'rb+')
321 fp = self.wfile('.hgtags', 'rb+')
322 except IOError, e:
322 except IOError, e:
323 if e.errno != errno.ENOENT:
323 if e.errno != errno.ENOENT:
324 raise
324 raise
325 fp = self.wfile('.hgtags', 'ab')
325 fp = self.wfile('.hgtags', 'ab')
326 else:
326 else:
327 prevtags = fp.read()
327 prevtags = fp.read()
328
328
329 # committed tags are stored in UTF-8
329 # committed tags are stored in UTF-8
330 writetags(fp, names, encoding.fromlocal, prevtags)
330 writetags(fp, names, encoding.fromlocal, prevtags)
331
331
332 fp.close()
332 fp.close()
333
333
334 self.invalidatecaches()
334 self.invalidatecaches()
335
335
336 if '.hgtags' not in self.dirstate:
336 if '.hgtags' not in self.dirstate:
337 self[None].add(['.hgtags'])
337 self[None].add(['.hgtags'])
338
338
339 m = matchmod.exact(self.root, '', ['.hgtags'])
339 m = matchmod.exact(self.root, '', ['.hgtags'])
340 tagnode = self.commit(message, user, date, extra=extra, match=m)
340 tagnode = self.commit(message, user, date, extra=extra, match=m)
341
341
342 for name in names:
342 for name in names:
343 self.hook('tag', node=hex(node), tag=name, local=local)
343 self.hook('tag', node=hex(node), tag=name, local=local)
344
344
345 return tagnode
345 return tagnode
346
346
    def tag(self, names, node, message, local, user, date):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            # refuse to clobber uncommitted .hgtags changes in the
            # working copy (checked across all dirty status categories)
            for x in self.status()[:5]:
                if '.hgtags' in x:
                    raise util.Abort(_('working copy of .hgtags is changed '
                                       '(please commit .hgtags manually)'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date)
376
376
    @propertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                # filled lazily by tagslist() / nodetags() on first use
                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache
398
398
399 def tags(self):
399 def tags(self):
400 '''return a mapping of tag to node'''
400 '''return a mapping of tag to node'''
401 t = {}
401 t = {}
402 for k, v in self._tagscache.tags.iteritems():
402 for k, v in self._tagscache.tags.iteritems():
403 try:
403 try:
404 # ignore tags to unknown nodes
404 # ignore tags to unknown nodes
405 self.changelog.rev(v)
405 self.changelog.rev(v)
406 t[k] = v
406 t[k] = v
407 except (error.LookupError, ValueError):
407 except (error.LookupError, ValueError):
408 pass
408 pass
409 return t
409 return t
410
410
    def _findtags(self):
        '''Do the hard work of finding tags.  Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use?  Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type?  Should there
        # be one tagtype for all such "virtual" tags?  Or is the status
        # quo fine?

        alltags = {}    # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts.  Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)
443
443
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)
454
454
455 def tagslist(self):
455 def tagslist(self):
456 '''return a list of tags ordered by revision'''
456 '''return a list of tags ordered by revision'''
457 if not self._tagscache.tagslist:
457 if not self._tagscache.tagslist:
458 l = []
458 l = []
459 for t, n in self.tags().iteritems():
459 for t, n in self.tags().iteritems():
460 r = self.changelog.rev(n)
460 r = self.changelog.rev(n)
461 l.append((r, t, n))
461 l.append((r, t, n))
462 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
462 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
463
463
464 return self._tagscache.tagslist
464 return self._tagscache.tagslist
465
465
466 def nodetags(self, node):
466 def nodetags(self, node):
467 '''return the tags associated with a node'''
467 '''return the tags associated with a node'''
468 if not self._tagscache.nodetagscache:
468 if not self._tagscache.nodetagscache:
469 nodetagscache = {}
469 nodetagscache = {}
470 for t, n in self._tagscache.tags.iteritems():
470 for t, n in self._tagscache.tags.iteritems():
471 nodetagscache.setdefault(n, []).append(t)
471 nodetagscache.setdefault(n, []).append(t)
472 for tags in nodetagscache.itervalues():
472 for tags in nodetagscache.itervalues():
473 tags.sort()
473 tags.sort()
474 self._tagscache.nodetagscache = nodetagscache
474 self._tagscache.nodetagscache = nodetagscache
475 return self._tagscache.nodetagscache.get(node, [])
475 return self._tagscache.nodetagscache.get(node, [])
476
476
477 def nodebookmarks(self, node):
477 def nodebookmarks(self, node):
478 marks = []
478 marks = []
479 for bookmark, n in self._bookmarks.iteritems():
479 for bookmark, n in self._bookmarks.iteritems():
480 if n == node:
480 if n == node:
481 marks.append(bookmark)
481 marks.append(bookmark)
482 return sorted(marks)
482 return sorted(marks)
483
483
    def _branchtags(self, partial, lrev):
        # TODO: rename this function?
        # Extend the branch-head cache 'partial' (valid through rev lrev)
        # with all revisions up to the current tip, then persist it.
        tiprev = len(self) - 1
        if lrev != tiprev:
            ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
            self._updatebranchcache(partial, ctxgen)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
493
493
    def updatebranchcache(self):
        """Bring self._branchcache up to date with the current tip."""
        tip = self.changelog.tip()
        if self._branchcache is not None and self._branchcachetip == tip:
            # cache is already current
            return

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # no usable in-memory cache (first call, or the old tip was
            # stripped): start from the on-disk cache instead
            partial, last, lrev = self._readbranchcache()
        else:
            # incremental update from the previously cached tip
            lrev = self.changelog.rev(oldtip)
            partial = self._branchcache

        self._branchtags(partial, lrev)
        # this private cache holds all heads (not just tips)
        self._branchcache = partial
510
510
def branchmap(self):
    """Return a dict mapping each branch name to its list of heads."""
    self.updatebranchcache()
    return self._branchcache
515
515
def branchtags(self):
    """Return a dict mapping each branch name to its tipmost head.

    An open head is preferred over a more recent closed one.
    """
    read = self.changelog.read
    tips = {}
    for name, heads in self.branchmap().iteritems():
        chosen = heads[-1]  # default: newest head, even if closed
        for candidate in reversed(heads):
            # field 5 of a changelog entry is the extra dict;
            # a 'close' key marks the head as closed
            if 'close' not in read(candidate)[5]:
                chosen = candidate
                break
        tips[name] = chosen
    return tips
528
528
529 def _readbranchcache(self):
529 def _readbranchcache(self):
530 partial = {}
530 partial = {}
531 try:
531 try:
532 f = self.opener("cache/branchheads")
532 f = self.opener("cache/branchheads")
533 lines = f.read().split('\n')
533 lines = f.read().split('\n')
534 f.close()
534 f.close()
535 except (IOError, OSError):
535 except (IOError, OSError):
536 return {}, nullid, nullrev
536 return {}, nullid, nullrev
537
537
538 try:
538 try:
539 last, lrev = lines.pop(0).split(" ", 1)
539 last, lrev = lines.pop(0).split(" ", 1)
540 last, lrev = bin(last), int(lrev)
540 last, lrev = bin(last), int(lrev)
541 if lrev >= len(self) or self[lrev].node() != last:
541 if lrev >= len(self) or self[lrev].node() != last:
542 # invalidate the cache
542 # invalidate the cache
543 raise ValueError('invalidating branch cache (tip differs)')
543 raise ValueError('invalidating branch cache (tip differs)')
544 for l in lines:
544 for l in lines:
545 if not l:
545 if not l:
546 continue
546 continue
547 node, label = l.split(" ", 1)
547 node, label = l.split(" ", 1)
548 label = encoding.tolocal(label.strip())
548 label = encoding.tolocal(label.strip())
549 partial.setdefault(label, []).append(bin(node))
549 partial.setdefault(label, []).append(bin(node))
550 except KeyboardInterrupt:
550 except KeyboardInterrupt:
551 raise
551 raise
552 except Exception, inst:
552 except Exception, inst:
553 if self.ui.debugflag:
553 if self.ui.debugflag:
554 self.ui.warn(str(inst), '\n')
554 self.ui.warn(str(inst), '\n')
555 partial, last, lrev = {}, nullid, nullrev
555 partial, last, lrev = {}, nullid, nullrev
556 return partial, last, lrev
556 return partial, last, lrev
557
557
def _writebranchcache(self, branches, tip, tiprev):
    """Persist *branches* to .hg/cache/branchheads atomically.

    Failures are deliberately ignored: this is only a cache and can
    always be rebuilt from the changelog.
    """
    try:
        fp = self.opener("cache/branchheads", "w", atomictemp=True)
        fp.write("%s %s\n" % (hex(tip), tiprev))
        for label, nodes in branches.iteritems():
            encoded = encoding.fromlocal(label)
            for node in nodes:
                fp.write("%s %s\n" % (hex(node), encoded))
        fp.close()
    except (IOError, OSError):
        pass
568
568
def _updatebranchcache(self, partial, ctxgen):
    """Fold the changesets from *ctxgen* into the head map *partial*."""
    # collect new branch entries
    newnodes_by_branch = {}
    for ctx in ctxgen:
        newnodes_by_branch.setdefault(ctx.branch(), []).append(ctx.node())
    # if older branchheads are reachable from new ones, they aren't
    # really branchheads. Note checking parents is insufficient:
    # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
    for branch, newnodes in newnodes_by_branch.iteritems():
        bheads = partial.setdefault(branch, [])
        bheads.extend(newnodes)
        if len(bheads) <= 1:
            continue  # a single head cannot shadow anything
        bheads = sorted(bheads, key=lambda node: self[node].rev())
        # starting from tip means fewer passes over reachable
        while newnodes:
            latest = newnodes.pop()
            if latest not in bheads:
                continue
            minbhrev = self[bheads[0]].node()
            reachable = self.changelog.reachable(latest, minbhrev)
            reachable.remove(latest)
            if reachable:
                # anything reachable from a newer head is not a head
                bheads = [b for b in bheads if b not in reachable]
        partial[branch] = bheads
594
594
def lookup(self, key):
    """Resolve *key* (rev, node, tag, ...) to a changeset node."""
    return self[key].node()
597
597
def lookupbranch(self, key, remote=None):
    """Return the branch name *key* refers to.

    If *key* already names a branch (on *remote* when given), it is
    returned as-is; otherwise it is resolved as a changeset and that
    changeset's branch is returned.
    """
    target = remote or self
    if key in target.branchmap():
        return key

    if remote and remote.local():
        target = remote
    else:
        target = self
    return target[key].branch()
605
605
def known(self, nodes):
    """For each node, report whether it is present and not secret."""
    nodemap = self.changelog.nodemap
    result = []
    for node in nodes:
        rev = nodemap.get(node)
        # unknown nodes and secret-phase changesets are not advertised
        result.append(rev is not None and
                      self._phaserev[rev] < phases.secret)
    return result
614
614
def local(self):
    """Return self: this repository is local (peers return None/False)."""
    return self
617
617
def join(self, f):
    """Return the path of *f* relative to the .hg directory."""
    return os.path.join(self.path, f)
620
620
def wjoin(self, f):
    """Return the path of *f* relative to the working directory root."""
    return os.path.join(self.root, f)
623
623
def file(self, f):
    """Return the filelog for tracked file *f*.

    A leading '/' is stripped so 'foo' and '/foo' address the same
    store file. Uses startswith() instead of f[0] so an empty name
    cannot raise IndexError.
    """
    if f.startswith('/'):
        f = f[1:]
    return filelog.filelog(self.sopener, f)
628
628
def changectx(self, changeid):
    """Return the change context for *changeid* (rev, node or tag)."""
    return self[changeid]
631
631
def parents(self, changeid=None):
    """Return the list of parent changectxs of *changeid*
    (working directory parents when changeid is None)."""
    return self[changeid].parents()
635
635
def setparents(self, p1, p2=nullid):
    """Set the dirstate parents and fix up copy records.

    The dirstate cannot adjust copies itself because that requires
    the parent manifests; copies are preserved only for entries
    added to the first parent.
    """
    copies = self.dirstate.setparents(p1, p2)
    if not copies:
        return
    pctx = self[p1]
    for dst, src in copies.items():
        if dst not in pctx and src in pctx:
            self.dirstate.copy(src, dst)
646
646
def filectx(self, path, changeid=None, fileid=None):
    """Return a file context for *path*.

    changeid may be a changeset revision, node, or tag;
    fileid may be a file revision or node.
    """
    return context.filectx(self, path, changeid, fileid)
651
651
def getcwd(self):
    """Return the current working directory as seen by the dirstate."""
    return self.dirstate.getcwd()
654
654
def pathto(self, f, cwd=None):
    """Return repo-file *f* as a path relative to *cwd* (via dirstate)."""
    return self.dirstate.pathto(f, cwd)
657
657
def wfile(self, f, mode='r'):
    """Open working-directory file *f* through the working opener."""
    return self.wopener(f, mode)
660
660
661 def _link(self, f):
661 def _link(self, f):
662 return os.path.islink(self.wjoin(f))
662 return os.path.islink(self.wjoin(f))
663
663
def _loadfilter(self, filter):
    """Compile and memoize the patterns configured in section *filter*.

    Returns a list of (matcher, filterfn, params) tuples; a command of
    '!' disables the pattern. Results are cached in self.filterpats.
    """
    if filter not in self.filterpats:
        patterns = []
        for pat, cmd in self.ui.configitems(filter):
            if cmd == '!':
                continue  # explicitly disabled pattern
            mf = matchmod.match(self.root, '', [pat])
            fn = None
            params = cmd
            # a command starting with a registered data filter's name
            # dispatches to that filter instead of a shell command
            for name, filterfn in self._datafilters.iteritems():
                if cmd.startswith(name):
                    fn = filterfn
                    params = cmd[len(name):].lstrip()
                    break
            if not fn:
                fn = lambda s, c, **kwargs: util.filter(s, c)
            # Wrap old filters not supporting keyword arguments
            if not inspect.getargspec(fn)[2]:
                oldfn = fn
                fn = lambda s, c, **kwargs: oldfn(s, c)
            patterns.append((mf, fn, params))
        self.filterpats[filter] = patterns
    return self.filterpats[filter]
687
687
688 def _filter(self, filterpats, filename, data):
688 def _filter(self, filterpats, filename, data):
689 for mf, fn, cmd in filterpats:
689 for mf, fn, cmd in filterpats:
690 if mf(filename):
690 if mf(filename):
691 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
691 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
692 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
692 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
693 break
693 break
694
694
695 return data
695 return data
696
696
@propertycache
def _encodefilterpats(self):
    """Lazily-loaded, cached [encode] filter patterns."""
    return self._loadfilter('encode')
700
700
@propertycache
def _decodefilterpats(self):
    """Lazily-loaded, cached [decode] filter patterns."""
    return self._loadfilter('decode')
704
704
def adddatafilter(self, name, filter):
    """Register data filter *filter* under *name* (see _loadfilter)."""
    self._datafilters[name] = filter
707
707
def wread(self, filename):
    """Return *filename*'s working-directory content, encode-filtered.

    A symlink is read as its target path, not the target's content.
    """
    if self._link(filename):
        raw = os.readlink(self.wjoin(filename))
    else:
        raw = self.wopener.read(filename)
    return self._filter(self._encodefilterpats, filename, raw)
714
714
def wwrite(self, filename, data, flags):
    """Write *data* to working-directory file *filename*, decode-filtered.

    'l' in *flags* creates a symlink; 'x' sets the executable bit.
    """
    filtered = self._filter(self._decodefilterpats, filename, data)
    if 'l' in flags:
        self.wopener.symlink(filtered, filename)
    else:
        self.wopener.write(filename, filtered)
        if 'x' in flags:
            util.setflags(self.wjoin(filename), False, True)
723
723
def wwritedata(self, filename, data):
    """Return *data* decode-filtered for *filename* without writing it."""
    return self._filter(self._decodefilterpats, filename, data)
726
726
def transaction(self, desc):
    """Open a store transaction described by *desc*.

    Nests into a transaction that is already running; otherwise
    snapshots the current state into journal files and returns a
    new transaction that renames them to undo files on close.
    """
    tr = self._transref and self._transref() or None
    if tr and tr.running():
        return tr.nest()

    # abort here if the journal already exists
    if os.path.exists(self.sjoin("journal")):
        raise error.RepoError(
            _("abandoned transaction found - run hg recover"))

    self._writejournal(desc)
    renames = [(name, undoname(name)) for name in self._journalfiles()]

    tr = transaction.transaction(self.ui.warn, self.sopener,
                                 self.sjoin("journal"),
                                 aftertrans(renames),
                                 self.store.createmode)
    self._transref = weakref.ref(tr)
    return tr
746
746
747 def _journalfiles(self):
747 def _journalfiles(self):
748 return (self.sjoin('journal'), self.join('journal.dirstate'),
748 return (self.sjoin('journal'), self.join('journal.dirstate'),
749 self.join('journal.branch'), self.join('journal.desc'),
749 self.join('journal.branch'), self.join('journal.desc'),
750 self.join('journal.bookmarks'),
750 self.join('journal.bookmarks'),
751 self.sjoin('journal.phaseroots'))
751 self.sjoin('journal.phaseroots'))
752
752
def undofiles(self):
    """Return the undo-file counterparts of the journal files."""
    return [undoname(name) for name in self._journalfiles()]
755
755
def _writejournal(self, desc):
    """Snapshot dirstate, branch, description, bookmarks and phase
    roots so an interrupted transaction can be rolled back."""
    self.opener.write("journal.dirstate",
                      self.opener.tryread("dirstate"))
    self.opener.write("journal.branch",
                      encoding.fromlocal(self.dirstate.branch()))
    # first line: old repo length, second line: transaction description
    self.opener.write("journal.desc",
                      "%d\n%s\n" % (len(self), desc))
    self.opener.write("journal.bookmarks",
                      self.opener.tryread("bookmarks"))
    self.sopener.write("journal.phaseroots",
                       self.sopener.tryread("phaseroots"))
767
767
def recover(self):
    """Roll back an interrupted transaction.

    Returns True if a journal was found and rolled back, else False.
    """
    lock = self.lock()
    try:
        if not os.path.exists(self.sjoin("journal")):
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
        self.ui.status(_("rolling back interrupted transaction\n"))
        transaction.rollback(self.sopener, self.sjoin("journal"),
                             self.ui.warn)
        self.invalidate()
        return True
    finally:
        lock.release()
782
782
def rollback(self, dryrun=False, force=False):
    """Undo the last transaction if undo information exists.

    Returns 0 on success, 1 when there is nothing to roll back.
    """
    wlock = lock = None
    try:
        wlock = self.wlock()
        lock = self.lock()
        if not os.path.exists(self.sjoin("undo")):
            self.ui.warn(_("no rollback information available\n"))
            return 1
        return self._rollback(dryrun, force)
    finally:
        release(lock, wlock)
795
795
def _rollback(self, dryrun, force):
    """Perform the actual rollback (caller holds wlock and lock).

    Reads undo.desc to describe what is being undone, refuses to
    undo a commit that is not the working directory parent unless
    *force* is set, then restores the journaled files.
    """
    ui = self.ui
    try:
        args = self.opener.read('undo.desc').splitlines()
        (oldlen, desc, detail) = (int(args[0]), args[1], None)
        if len(args) >= 3:
            detail = args[2]
        oldtip = oldlen - 1

        if detail and ui.verbose:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s: %s)\n')
                   % (oldtip, desc, detail))
        else:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s)\n')
                   % (oldtip, desc))
    except IOError:
        # no undo.desc: transaction predates description journaling
        msg = _('rolling back unknown transaction\n')
        desc = None

    if not force and self['.'] != self['tip'] and desc == 'commit':
        raise util.Abort(
            _('rollback of last commit while not checked out '
              'may lose data'), hint=_('use -f to force'))

    ui.status(msg)
    if dryrun:
        return 0

    parents = self.dirstate.parents()
    transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
    if os.path.exists(self.join('undo.bookmarks')):
        util.rename(self.join('undo.bookmarks'),
                    self.join('bookmarks'))
    if os.path.exists(self.sjoin('undo.phaseroots')):
        util.rename(self.sjoin('undo.phaseroots'),
                    self.sjoin('phaseroots'))
    self.invalidate()

    # only reset the dirstate when a parent was stripped away
    parentgone = (parents[0] not in self.changelog.nodemap or
                  parents[1] not in self.changelog.nodemap)
    if parentgone:
        util.rename(self.join('undo.dirstate'), self.join('dirstate'))
        try:
            branch = self.opener.read('undo.branch')
            self.dirstate.setbranch(branch)
        except IOError:
            ui.warn(_('named branch could not be reset: '
                      'current branch is still \'%s\'\n')
                    % self.dirstate.branch())

        self.dirstate.invalidate()
        parents = tuple([p.rev() for p in self.parents()])
        if len(parents) > 1:
            ui.status(_('working directory now based on '
                        'revisions %d and %d\n') % parents)
        else:
            ui.status(_('working directory now based on '
                        'revision %d\n') % parents)
    self.destroyed()
    return 0
858
858
def invalidatecaches(self):
    """Drop the cached tag, phase and branch data."""
    def drop(attr):
        # attribute may not be materialized yet - that is fine
        try:
            delattr(self, attr)
        except AttributeError:
            pass

    drop('_tagscache')
    drop('_phaserev')

    self._branchcache = None # in UTF-8
    self._branchcachetip = None
871
871
def invalidatedirstate(self):
    '''Invalidates the dirstate, causing the next call to dirstate
    to check if it was modified since the last time it was read,
    rereading it if it has.

    This differs from dirstate.invalidate(), which unconditionally
    rereads the dirstate. Use dirstate.invalidate() to explicitly
    restore a previous known good state.'''
    if 'dirstate' not in self.__dict__:
        return  # not loaded yet, nothing to drop
    for name in self.dirstate._filecache:
        try:
            delattr(self.dirstate, name)
        except AttributeError:
            pass
    delattr(self, 'dirstate')
888
888
def invalidate(self):
    """Drop every filecache entry and all derived caches."""
    for name in self._filecache:
        # dirstate is invalidated separately in invalidatedirstate()
        if name == 'dirstate':
            continue
        try:
            delattr(self, name)
        except AttributeError:
            pass
    self.invalidatecaches()
900
900
901 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
901 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
902 try:
902 try:
903 l = lock.lock(lockname, 0, releasefn, desc=desc)
903 l = lock.lock(lockname, 0, releasefn, desc=desc)
904 except error.LockHeld, inst:
904 except error.LockHeld, inst:
905 if not wait:
905 if not wait:
906 raise
906 raise
907 self.ui.warn(_("waiting for lock on %s held by %r\n") %
907 self.ui.warn(_("waiting for lock on %s held by %r\n") %
908 (desc, inst.locker))
908 (desc, inst.locker))
909 # default to 600 seconds timeout
909 # default to 600 seconds timeout
910 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
910 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
911 releasefn, desc=desc)
911 releasefn, desc=desc)
912 if acquirefn:
912 if acquirefn:
913 acquirefn()
913 acquirefn()
914 return l
914 return l
915
915
916 def _afterlock(self, callback):
916 def _afterlock(self, callback):
917 """add a callback to the current repository lock.
917 """add a callback to the current repository lock.
918
918
919 The callback will be executed on lock release."""
919 The callback will be executed on lock release."""
920 l = self._lockref and self._lockref()
920 l = self._lockref and self._lockref()
921 if l:
921 if l:
922 l.postrelease.append(callback)
922 l.postrelease.append(callback)
923 else:
924 callback()
923
925
def lock(self, wait=True):
    '''Lock the repository store (.hg/store) and return a weak reference
    to the lock. Use this before modifying the store (e.g. committing or
    stripping). If you are opening a transaction, get a lock as well.)'''
    existing = self._lockref and self._lockref()
    if existing is not None and existing.held:
        existing.lock()  # bump the recursion count
        return existing

    def unlock():
        # flush pending store writes and dirty phase roots on release
        self.store.write()
        if self._dirtyphases:
            phases.writeroots(self)
            self._dirtyphases = False
        for name, entry in self._filecache.items():
            if name != 'dirstate':
                entry.refresh()

    newlock = self._lock(self.sjoin("lock"), wait, unlock,
                         self.invalidate, _('repository %s') % self.origroot)
    self._lockref = weakref.ref(newlock)
    return newlock
947
949
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.
    Use this before modifying files in .hg.'''
    existing = self._wlockref and self._wlockref()
    if existing is not None and existing.held:
        existing.lock()  # bump the recursion count
        return existing

    def unlock():
        # flush the dirstate to disk on release
        self.dirstate.write()
        entry = self._filecache.get('dirstate')
        if entry:
            entry.refresh()

    newlock = self._lock(self.join("wlock"), wait, unlock,
                         self.invalidatedirstate,
                         _('working directory of %s') % self.origroot)
    self._wlockref = weakref.ref(newlock)
    return newlock
968
970
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """Commit an individual file as part of a larger transaction.

    Appends the file name to *changelist* when a new filelog revision
    is created (or when only the flags changed in a merge) and returns
    the resulting file node.
    """
    fname = fctx.path()
    text = fctx.data()
    flog = self.file(fname)
    fparent1 = manifest1.get(fname, nullid)
    fparent2 = fparent2o = manifest2.get(fname, nullid)

    meta = {}
    copy = fctx.renamed()
    if copy and copy[0] != fname:
        # Mark the new revision of this file as a copy of another
        # file. This copy data will effectively act as a parent
        # of this new revision. If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent. For example:
        #
        # 0 --- 1 --- 3 rev1 changes file foo
        # \ / rev2 renames foo to bar and changes it
        # \- 2 -/ rev3 should have bar with all changes and
        # should record that bar descends from
        # bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3 rev4 reverts the content change from rev2
        # \ / merging rev3 and rev4 should use bar@rev2
        # \- 2 --- 4 as the merge base
        #

        cfname = copy[0]
        crev = manifest1.get(cfname)
        newfparent = fparent2

        if manifest2: # branch merge
            if fparent2 == nullid or crev is None: # copied on remote side
                if cfname in manifest2:
                    crev = manifest2[cfname]
                    newfparent = fparent1

        # find source in nearest ancestor if we've lost track
        if not crev:
            self.ui.debug(" %s: searching for copy revision for %s\n" %
                          (fname, cfname))
            for ancestor in self[None].ancestors():
                if cfname in ancestor:
                    crev = ancestor[cfname].filenode()
                    break

        if crev:
            self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            # null first parent tells readers to look up the copy data
            fparent1, fparent2 = nullid, newfparent
        else:
            self.ui.warn(_("warning: can't find ancestor for '%s' "
                           "copied from '%s'!\n") % (fname, cfname))

    elif fparent2 != nullid:
        # is one parent an ancestor of the other?
        fparentancestor = flog.ancestor(fparent1, fparent2)
        if fparentancestor == fparent1:
            fparent1, fparent2 = fparent2, nullid
        elif fparentancestor == fparent2:
            fparent2 = nullid

    # is the file changed?
    if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
        changelist.append(fname)
        return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

    # are just the flags changed during merge?
    if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
        changelist.append(fname)

    return fparent1
1048
1050
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the node of the new changeset, or None when there is
        nothing to commit (no file changes, no branch change, not a
        merge and not an explicit branch close).
        """

        def fail(f, msg):
            # used as match.bad callback: abort on any bad/unmatched file
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = matchmod.always(self.root, '')

        if not force:
            # record visited directories so explicit dir patterns can be
            # validated below; abort on bad files instead of warning
            vdirs = []
            match.dir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if (not force and merge and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            # changes is the status tuple:
            # (modified, added, removed, deleted, unknown, ignored, clean)
            changes = self.status(match=match, clean=force)
            if force:
                changes[0].extend(changes[6]) # mq may commit unchanged files

            # check subrepos
            subs = []
            commitsubs = set()
            newstate = wctx.substate.copy()
            # only manage subrepos and .hgsubstate if .hgsub is present
            if '.hgsub' in wctx:
                # we'll decide whether to track this ourselves, thanks
                if '.hgsubstate' in changes[0]:
                    changes[0].remove('.hgsubstate')
                if '.hgsubstate' in changes[2]:
                    changes[2].remove('.hgsubstate')

                # compare current state to last committed state
                # build new substate based on last committed state
                oldstate = wctx.p1().substate
                for s in sorted(newstate.keys()):
                    if not match(s):
                        # ignore working copy, use old state if present
                        if s in oldstate:
                            newstate[s] = oldstate[s]
                            continue
                        if not force:
                            raise util.Abort(
                                _("commit with new subrepo %s excluded") % s)
                    if wctx.sub(s).dirty(True):
                        # dirty subrepos are only committed recursively
                        # when ui.commitsubrepos is enabled
                        if not self.ui.configbool('ui', 'commitsubrepos'):
                            raise util.Abort(
                                _("uncommitted changes in subrepo %s") % s,
                                hint=_("use --subrepos for recursive commit"))
                        subs.append(s)
                        commitsubs.add(s)
                    else:
                        bs = wctx.sub(s).basestate()
                        newstate[s] = (newstate[s][0], bs, newstate[s][2])
                        if oldstate.get(s, (None, None, None))[1] != bs:
                            subs.append(s)

                # check for removed subrepos
                for p in wctx.parents():
                    r = [s for s in p.substate if s not in newstate]
                    subs += [s for s in r if match(s)]
                if subs:
                    if (not match('.hgsub') and
                        '.hgsub' in (wctx.modified() + wctx.added())):
                        raise util.Abort(
                            _("can't commit subrepos without .hgsub"))
                    changes[0].insert(0, '.hgsubstate')

            elif '.hgsub' in changes[2]:
                # clean up .hgsubstate when .hgsub is removed
                if ('.hgsubstate' in wctx and
                    '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
                    changes[2].insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(changes[0] + changes[1] + changes[2])

                for f in match.files():
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in changes[3]: # missing
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            # nothing to commit and no branch change / close: bail out
            if (not force and not extra.get("close") and not merge
                and not (changes[0] or changes[1] or changes[2])
                and wctx.branch() == wctx.p1().branch()):
                return None

            if merge and changes[3]:
                raise util.Abort(_("cannot commit merge with missing files"))

            # refuse to commit while merge conflicts are unresolved
            ms = mergemod.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg help resolve)"))

            cctx = context.workingctx(self, text, user, date, extra, changes)
            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # commit subs and write new state
            if subs:
                for s in sorted(commitsubs):
                    sub = wctx.sub(s)
                    self.ui.status(_('committing subrepository %s\n') %
                        subrepo.subrelpath(sub))
                    sr = sub.commit(cctx._text, user, date)
                    newstate[s] = (newstate[s][0], sr)
                subrepo.writestate(self, newstate)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook).  Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfn = self.savecommitmessage(cctx._text)

            p1, p2 = self.dirstate.parents()
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            try:
                self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
                ret = self.commitctx(cctx, True)
            except:
                # deliberately broad: whatever aborted the commit, tell the
                # user where the saved message lives, then re-raise
                if edited:
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update bookmarks, dirstate and mergestate
            bookmarks.update(self, p1, ret)
            for f in changes[0] + changes[1]:
                self.dirstate.normal(f)
            for f in changes[2]:
                self.dirstate.drop(f)
            self.dirstate.setparents(ret)
            ms.reset()
        finally:
            wlock.release()

        # run the commit hook only once the lock is released (issue3344);
        # default arguments capture the values now, _afterlock defers the call
        def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
            self.hook("commit", node=node, parent1=parent1, parent2=parent2)
        self._afterlock(commithook)
        return ret
1214
1218
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.

        Returns the node of the new changeset. When error is true,
        IOErrors while committing individual files are fatal instead of
        demoting the file to "removed".
        """

        tr = lock = None
        removed = list(ctx.removed())
        p1, p2 = ctx.p1(), ctx.p2()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction("commit")
            # weak proxy so the transaction can be garbage collected once
            # released even though filelogs keep a reference to it
            trp = weakref.proxy(tr)

            if ctx.files():
                m1 = p1.manifest().copy()
                m2 = p2.manifest()

                # check in files
                new = {}      # filename -> new filelog node
                changed = []  # files actually changed (filled by _filecommit)
                linkrev = len(self)
                for f in sorted(ctx.modified() + ctx.added()):
                    self.ui.note(f + "\n")
                    try:
                        fctx = ctx[f]
                        new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                                  changed)
                        m1.set(f, fctx.flags())
                    except OSError, inst:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    except IOError, inst:
                        errcode = getattr(inst, 'errno', errno.ENOENT)
                        if error or errcode and errcode != errno.ENOENT:
                            self.ui.warn(_("trouble committing %s!\n") % f)
                            raise
                        else:
                            # missing file: treat it as removed
                            removed.append(f)

                # update manifest
                m1.update(new)
                # only report files removed that actually existed somewhere
                removed = [f for f in sorted(removed) if f in m1 or f in m2]
                drop = [f for f in removed if f in m1]
                for f in drop:
                    del m1[f]
                mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                       p2.manifestnode(), (new, drop))
                files = changed + removed
            else:
                # no file changes: reuse the first parent's manifest
                mn = p1.manifestnode()
                files = []

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, files, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            # 'pending' callback lets pretxncommit hooks see the new revision
            p = lambda: self.changelog.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            # set the new commit is proper phase
            targetphase = phases.newcommitphase(self.ui)
            if targetphase:
                # retract boundary do not alter parent changeset.
                # if a parent have higher the resulting phase will
                # be compliant anyway
                #
                # if minimal phase was 0 we don't need to retract anything
                phases.retractboundary(self, targetphase, [n])
            tr.close()
            self.updatebranchcache()
            return n
        finally:
            if tr:
                tr.release()
            lock.release()
1299
1296 def destroyed(self):
1300 def destroyed(self):
1297 '''Inform the repository that nodes have been destroyed.
1301 '''Inform the repository that nodes have been destroyed.
1298 Intended for use by strip and rollback, so there's a common
1302 Intended for use by strip and rollback, so there's a common
1299 place for anything that has to be done after destroying history.'''
1303 place for anything that has to be done after destroying history.'''
1300 # XXX it might be nice if we could take the list of destroyed
1304 # XXX it might be nice if we could take the list of destroyed
1301 # nodes, but I don't see an easy way for rollback() to do that
1305 # nodes, but I don't see an easy way for rollback() to do that
1302
1306
1303 # Ensure the persistent tag cache is updated. Doing it now
1307 # Ensure the persistent tag cache is updated. Doing it now
1304 # means that the tag cache only has to worry about destroyed
1308 # means that the tag cache only has to worry about destroyed
1305 # heads immediately after a strip/rollback. That in turn
1309 # heads immediately after a strip/rollback. That in turn
1306 # guarantees that "cachetip == currenttip" (comparing both rev
1310 # guarantees that "cachetip == currenttip" (comparing both rev
1307 # and node) always means no nodes have been added or destroyed.
1311 # and node) always means no nodes have been added or destroyed.
1308
1312
1309 # XXX this is suboptimal when qrefresh'ing: we strip the current
1313 # XXX this is suboptimal when qrefresh'ing: we strip the current
1310 # head, refresh the tag cache, then immediately add a new head.
1314 # head, refresh the tag cache, then immediately add a new head.
1311 # But I think doing it this way is necessary for the "instant
1315 # But I think doing it this way is necessary for the "instant
1312 # tag cache retrieval" case to work.
1316 # tag cache retrieval" case to work.
1313 self.invalidatecaches()
1317 self.invalidatecaches()
1314
1318
1315 # Discard all cache entries to force reloading everything.
1319 # Discard all cache entries to force reloading everything.
1316 self._filecache.clear()
1320 self._filecache.clear()
1317
1321
1318 def walk(self, match, node=None):
1322 def walk(self, match, node=None):
1319 '''
1323 '''
1320 walk recursively through the directory tree or a given
1324 walk recursively through the directory tree or a given
1321 changeset, finding all files matched by the match
1325 changeset, finding all files matched by the match
1322 function
1326 function
1323 '''
1327 '''
1324 return self[node].walk(match)
1328 return self[node].walk(match)
1325
1329
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False,
               listsubrepos=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a tuple of seven sorted lists:
        (modified, added, removed, deleted, unknown, ignored, clean).
        The ignored/clean/unknown lists are only populated when the
        corresponding flag is set.
        """

        def mfmatches(ctx):
            # manifest of ctx restricted to files accepted by 'match'
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2.rev() is None
        parentworking = working and ctx1 == self['.']
        match = match or matchmod.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in ctx1 and f not in ctx1.dirs():
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
            match.bad = bad

        if working: # we need to scan the working dir
            subrepos = []
            if '.hgsub' in self.dirstate:
                subrepos = ctx2.substate.keys()
            s = self.dirstate.status(match, subrepos, listignored,
                                     listclean, listunknown)
            # 'cmp' holds files the dirstate could not decide about cheaply
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in sorted(cmp):
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f])):
                        modified.append(f)
                    else:
                        fixup.append(f)

                # update dirstate for files that are actually clean
                if fixup:
                    if listclean:
                        clean += fixup

                    try:
                        # updating the dirstate is optional
                        # so we don't wait on the lock
                        wlock = self.wlock(False)
                        try:
                            for f in fixup:
                                self.dirstate.normal(f)
                        finally:
                            wlock.release()
                    except error.LockError:
                        pass

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            # walk mf2, classifying each file against mf1; whatever is
            # left in mf1 afterwards has been removed
            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    if (fn not in deleted and
                        (mf1.flags(fn) != mf2.flags(fn) or
                         (mf1[fn] != mf2[fn] and
                          (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                elif fn not in deleted:
                    added.append(fn)
            removed = mf1.keys()

        if working and modified and not self.dirstate._checklink:
            # Symlink placeholders may get non-symlink-like contents
            # via user error or dereferencing by NFS or Samba servers,
            # so we filter out any placeholders that don't look like a
            # symlink
            sane = []
            for f in modified:
                if ctx2.flags(f) == 'l':
                    d = ctx2[f].data()
                    if len(d) >= 1024 or '\n' in d or util.binary(d):
                        self.ui.debug('ignoring suspect symlink placeholder'
                                      ' "%s"\n' % f)
                        continue
                sane.append(f)
            modified = sane

        r = modified, added, removed, deleted, unknown, ignored, clean

        if listsubrepos:
            # recurse into each subrepo, prefixing its results with the
            # subrepo path before merging them into ours
            for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
                if working:
                    rev2 = None
                else:
                    rev2 = ctx2.substate[subpath][1]
                try:
                    submatch = matchmod.narrowmatcher(subpath, match)
                    s = sub.status(rev2, match=submatch, ignored=listignored,
                                   clean=listclean, unknown=listunknown,
                                   listsubrepos=True)
                    for rfiles, sfiles in zip(r, s):
                        rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
                except error.LookupError:
                    self.ui.status(_("skipping missing subrepository: %s\n")
                                   % subpath)

        for l in r:
            l.sort()
        return r
1474
1478
1475 def heads(self, start=None):
1479 def heads(self, start=None):
1476 heads = self.changelog.heads(start)
1480 heads = self.changelog.heads(start)
1477 # sort the output in rev descending order
1481 # sort the output in rev descending order
1478 return sorted(heads, key=self.changelog.rev, reverse=True)
1482 return sorted(heads, key=self.changelog.rev, reverse=True)
1479
1483
1480 def branchheads(self, branch=None, start=None, closed=False):
1484 def branchheads(self, branch=None, start=None, closed=False):
1481 '''return a (possibly filtered) list of heads for the given branch
1485 '''return a (possibly filtered) list of heads for the given branch
1482
1486
1483 Heads are returned in topological order, from newest to oldest.
1487 Heads are returned in topological order, from newest to oldest.
1484 If branch is None, use the dirstate branch.
1488 If branch is None, use the dirstate branch.
1485 If start is not None, return only heads reachable from start.
1489 If start is not None, return only heads reachable from start.
1486 If closed is True, return heads that are marked as closed as well.
1490 If closed is True, return heads that are marked as closed as well.
1487 '''
1491 '''
1488 if branch is None:
1492 if branch is None:
1489 branch = self[None].branch()
1493 branch = self[None].branch()
1490 branches = self.branchmap()
1494 branches = self.branchmap()
1491 if branch not in branches:
1495 if branch not in branches:
1492 return []
1496 return []
1493 # the cache returns heads ordered lowest to highest
1497 # the cache returns heads ordered lowest to highest
1494 bheads = list(reversed(branches[branch]))
1498 bheads = list(reversed(branches[branch]))
1495 if start is not None:
1499 if start is not None:
1496 # filter out the heads that cannot be reached from startrev
1500 # filter out the heads that cannot be reached from startrev
1497 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1501 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1498 bheads = [h for h in bheads if h in fbheads]
1502 bheads = [h for h in bheads if h in fbheads]
1499 if not closed:
1503 if not closed:
1500 bheads = [h for h in bheads if
1504 bheads = [h for h in bheads if
1501 ('close' not in self.changelog.read(h)[5])]
1505 ('close' not in self.changelog.read(h)[5])]
1502 return bheads
1506 return bheads
1503
1507
1504 def branches(self, nodes):
1508 def branches(self, nodes):
1505 if not nodes:
1509 if not nodes:
1506 nodes = [self.changelog.tip()]
1510 nodes = [self.changelog.tip()]
1507 b = []
1511 b = []
1508 for n in nodes:
1512 for n in nodes:
1509 t = n
1513 t = n
1510 while True:
1514 while True:
1511 p = self.changelog.parents(n)
1515 p = self.changelog.parents(n)
1512 if p[1] != nullid or p[0] == nullid:
1516 if p[1] != nullid or p[0] == nullid:
1513 b.append((t, n, p[0], p[1]))
1517 b.append((t, n, p[0], p[1]))
1514 break
1518 break
1515 n = p[0]
1519 n = p[0]
1516 return b
1520 return b
1517
1521
1518 def between(self, pairs):
1522 def between(self, pairs):
1519 r = []
1523 r = []
1520
1524
1521 for top, bottom in pairs:
1525 for top, bottom in pairs:
1522 n, l, i = top, [], 0
1526 n, l, i = top, [], 0
1523 f = 1
1527 f = 1
1524
1528
1525 while n != bottom and n != nullid:
1529 while n != bottom and n != nullid:
1526 p = self.changelog.parents(n)[0]
1530 p = self.changelog.parents(n)[0]
1527 if i == f:
1531 if i == f:
1528 l.append(n)
1532 l.append(n)
1529 f = f * 2
1533 f = f * 2
1530 n = p
1534 n = p
1531 i += 1
1535 i += 1
1532
1536
1533 r.append(l)
1537 r.append(l)
1534
1538
1535 return r
1539 return r
1536
1540
1537 def pull(self, remote, heads=None, force=False):
1541 def pull(self, remote, heads=None, force=False):
1538 lock = self.lock()
1542 lock = self.lock()
1539 try:
1543 try:
1540 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1544 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1541 force=force)
1545 force=force)
1542 common, fetch, rheads = tmp
1546 common, fetch, rheads = tmp
1543 if not fetch:
1547 if not fetch:
1544 self.ui.status(_("no changes found\n"))
1548 self.ui.status(_("no changes found\n"))
1545 added = []
1549 added = []
1546 result = 0
1550 result = 0
1547 else:
1551 else:
1548 if heads is None and list(common) == [nullid]:
1552 if heads is None and list(common) == [nullid]:
1549 self.ui.status(_("requesting all changes\n"))
1553 self.ui.status(_("requesting all changes\n"))
1550 elif heads is None and remote.capable('changegroupsubset'):
1554 elif heads is None and remote.capable('changegroupsubset'):
1551 # issue1320, avoid a race if remote changed after discovery
1555 # issue1320, avoid a race if remote changed after discovery
1552 heads = rheads
1556 heads = rheads
1553
1557
1554 if remote.capable('getbundle'):
1558 if remote.capable('getbundle'):
1555 cg = remote.getbundle('pull', common=common,
1559 cg = remote.getbundle('pull', common=common,
1556 heads=heads or rheads)
1560 heads=heads or rheads)
1557 elif heads is None:
1561 elif heads is None:
1558 cg = remote.changegroup(fetch, 'pull')
1562 cg = remote.changegroup(fetch, 'pull')
1559 elif not remote.capable('changegroupsubset'):
1563 elif not remote.capable('changegroupsubset'):
1560 raise util.Abort(_("partial pull cannot be done because "
1564 raise util.Abort(_("partial pull cannot be done because "
1561 "other repository doesn't support "
1565 "other repository doesn't support "
1562 "changegroupsubset."))
1566 "changegroupsubset."))
1563 else:
1567 else:
1564 cg = remote.changegroupsubset(fetch, heads, 'pull')
1568 cg = remote.changegroupsubset(fetch, heads, 'pull')
1565 clstart = len(self.changelog)
1569 clstart = len(self.changelog)
1566 result = self.addchangegroup(cg, 'pull', remote.url())
1570 result = self.addchangegroup(cg, 'pull', remote.url())
1567 clend = len(self.changelog)
1571 clend = len(self.changelog)
1568 added = [self.changelog.node(r) for r in xrange(clstart, clend)]
1572 added = [self.changelog.node(r) for r in xrange(clstart, clend)]
1569
1573
1570 # compute target subset
1574 # compute target subset
1571 if heads is None:
1575 if heads is None:
1572 # We pulled every thing possible
1576 # We pulled every thing possible
1573 # sync on everything common
1577 # sync on everything common
1574 subset = common + added
1578 subset = common + added
1575 else:
1579 else:
1576 # We pulled a specific subset
1580 # We pulled a specific subset
1577 # sync on this subset
1581 # sync on this subset
1578 subset = heads
1582 subset = heads
1579
1583
1580 # Get remote phases data from remote
1584 # Get remote phases data from remote
1581 remotephases = remote.listkeys('phases')
1585 remotephases = remote.listkeys('phases')
1582 publishing = bool(remotephases.get('publishing', False))
1586 publishing = bool(remotephases.get('publishing', False))
1583 if remotephases and not publishing:
1587 if remotephases and not publishing:
1584 # remote is new and unpublishing
1588 # remote is new and unpublishing
1585 pheads, _dr = phases.analyzeremotephases(self, subset,
1589 pheads, _dr = phases.analyzeremotephases(self, subset,
1586 remotephases)
1590 remotephases)
1587 phases.advanceboundary(self, phases.public, pheads)
1591 phases.advanceboundary(self, phases.public, pheads)
1588 phases.advanceboundary(self, phases.draft, subset)
1592 phases.advanceboundary(self, phases.draft, subset)
1589 else:
1593 else:
1590 # Remote is old or publishing all common changesets
1594 # Remote is old or publishing all common changesets
1591 # should be seen as public
1595 # should be seen as public
1592 phases.advanceboundary(self, phases.public, subset)
1596 phases.advanceboundary(self, phases.public, subset)
1593 finally:
1597 finally:
1594 lock.release()
1598 lock.release()
1595
1599
1596 return result
1600 return result
1597
1601
1598 def checkpush(self, force, revs):
1602 def checkpush(self, force, revs):
1599 """Extensions can override this function if additional checks have
1603 """Extensions can override this function if additional checks have
1600 to be performed before pushing, or call it if they override push
1604 to be performed before pushing, or call it if they override push
1601 command.
1605 command.
1602 """
1606 """
1603 pass
1607 pass
1604
1608
1605 def push(self, remote, force=False, revs=None, newbranch=False):
1609 def push(self, remote, force=False, revs=None, newbranch=False):
1606 '''Push outgoing changesets (limited by revs) from the current
1610 '''Push outgoing changesets (limited by revs) from the current
1607 repository to remote. Return an integer:
1611 repository to remote. Return an integer:
1608 - None means nothing to push
1612 - None means nothing to push
1609 - 0 means HTTP error
1613 - 0 means HTTP error
1610 - 1 means we pushed and remote head count is unchanged *or*
1614 - 1 means we pushed and remote head count is unchanged *or*
1611 we have outgoing changesets but refused to push
1615 we have outgoing changesets but refused to push
1612 - other values as described by addchangegroup()
1616 - other values as described by addchangegroup()
1613 '''
1617 '''
1614 # there are two ways to push to remote repo:
1618 # there are two ways to push to remote repo:
1615 #
1619 #
1616 # addchangegroup assumes local user can lock remote
1620 # addchangegroup assumes local user can lock remote
1617 # repo (local filesystem, old ssh servers).
1621 # repo (local filesystem, old ssh servers).
1618 #
1622 #
1619 # unbundle assumes local user cannot lock remote repo (new ssh
1623 # unbundle assumes local user cannot lock remote repo (new ssh
1620 # servers, http servers).
1624 # servers, http servers).
1621
1625
1622 # get local lock as we might write phase data
1626 # get local lock as we might write phase data
1623 locallock = self.lock()
1627 locallock = self.lock()
1624 try:
1628 try:
1625 self.checkpush(force, revs)
1629 self.checkpush(force, revs)
1626 lock = None
1630 lock = None
1627 unbundle = remote.capable('unbundle')
1631 unbundle = remote.capable('unbundle')
1628 if not unbundle:
1632 if not unbundle:
1629 lock = remote.lock()
1633 lock = remote.lock()
1630 try:
1634 try:
1631 # discovery
1635 # discovery
1632 fci = discovery.findcommonincoming
1636 fci = discovery.findcommonincoming
1633 commoninc = fci(self, remote, force=force)
1637 commoninc = fci(self, remote, force=force)
1634 common, inc, remoteheads = commoninc
1638 common, inc, remoteheads = commoninc
1635 fco = discovery.findcommonoutgoing
1639 fco = discovery.findcommonoutgoing
1636 outgoing = fco(self, remote, onlyheads=revs,
1640 outgoing = fco(self, remote, onlyheads=revs,
1637 commoninc=commoninc, force=force)
1641 commoninc=commoninc, force=force)
1638
1642
1639
1643
1640 if not outgoing.missing:
1644 if not outgoing.missing:
1641 # nothing to push
1645 # nothing to push
1642 scmutil.nochangesfound(self.ui, outgoing.excluded)
1646 scmutil.nochangesfound(self.ui, outgoing.excluded)
1643 ret = None
1647 ret = None
1644 else:
1648 else:
1645 # something to push
1649 # something to push
1646 if not force:
1650 if not force:
1647 discovery.checkheads(self, remote, outgoing,
1651 discovery.checkheads(self, remote, outgoing,
1648 remoteheads, newbranch,
1652 remoteheads, newbranch,
1649 bool(inc))
1653 bool(inc))
1650
1654
1651 # create a changegroup from local
1655 # create a changegroup from local
1652 if revs is None and not outgoing.excluded:
1656 if revs is None and not outgoing.excluded:
1653 # push everything,
1657 # push everything,
1654 # use the fast path, no race possible on push
1658 # use the fast path, no race possible on push
1655 cg = self._changegroup(outgoing.missing, 'push')
1659 cg = self._changegroup(outgoing.missing, 'push')
1656 else:
1660 else:
1657 cg = self.getlocalbundle('push', outgoing)
1661 cg = self.getlocalbundle('push', outgoing)
1658
1662
1659 # apply changegroup to remote
1663 # apply changegroup to remote
1660 if unbundle:
1664 if unbundle:
1661 # local repo finds heads on server, finds out what
1665 # local repo finds heads on server, finds out what
1662 # revs it must push. once revs transferred, if server
1666 # revs it must push. once revs transferred, if server
1663 # finds it has different heads (someone else won
1667 # finds it has different heads (someone else won
1664 # commit/push race), server aborts.
1668 # commit/push race), server aborts.
1665 if force:
1669 if force:
1666 remoteheads = ['force']
1670 remoteheads = ['force']
1667 # ssh: return remote's addchangegroup()
1671 # ssh: return remote's addchangegroup()
1668 # http: return remote's addchangegroup() or 0 for error
1672 # http: return remote's addchangegroup() or 0 for error
1669 ret = remote.unbundle(cg, remoteheads, 'push')
1673 ret = remote.unbundle(cg, remoteheads, 'push')
1670 else:
1674 else:
1671 # we return an integer indicating remote head count change
1675 # we return an integer indicating remote head count change
1672 ret = remote.addchangegroup(cg, 'push', self.url())
1676 ret = remote.addchangegroup(cg, 'push', self.url())
1673
1677
1674 if ret:
1678 if ret:
1675 # push succeed, synchonize target of the push
1679 # push succeed, synchonize target of the push
1676 cheads = outgoing.missingheads
1680 cheads = outgoing.missingheads
1677 elif revs is None:
1681 elif revs is None:
1678 # All out push fails. synchronize all common
1682 # All out push fails. synchronize all common
1679 cheads = outgoing.commonheads
1683 cheads = outgoing.commonheads
1680 else:
1684 else:
1681 # I want cheads = heads(::missingheads and ::commonheads)
1685 # I want cheads = heads(::missingheads and ::commonheads)
1682 # (missingheads is revs with secret changeset filtered out)
1686 # (missingheads is revs with secret changeset filtered out)
1683 #
1687 #
1684 # This can be expressed as:
1688 # This can be expressed as:
1685 # cheads = ( (missingheads and ::commonheads)
1689 # cheads = ( (missingheads and ::commonheads)
1686 # + (commonheads and ::missingheads))"
1690 # + (commonheads and ::missingheads))"
1687 # )
1691 # )
1688 #
1692 #
1689 # while trying to push we already computed the following:
1693 # while trying to push we already computed the following:
1690 # common = (::commonheads)
1694 # common = (::commonheads)
1691 # missing = ((commonheads::missingheads) - commonheads)
1695 # missing = ((commonheads::missingheads) - commonheads)
1692 #
1696 #
1693 # We can pick:
1697 # We can pick:
1694 # * missingheads part of comon (::commonheads)
1698 # * missingheads part of comon (::commonheads)
1695 common = set(outgoing.common)
1699 common = set(outgoing.common)
1696 cheads = [node for node in revs if node in common]
1700 cheads = [node for node in revs if node in common]
1697 # and
1701 # and
1698 # * commonheads parents on missing
1702 # * commonheads parents on missing
1699 revset = self.set('%ln and parents(roots(%ln))',
1703 revset = self.set('%ln and parents(roots(%ln))',
1700 outgoing.commonheads,
1704 outgoing.commonheads,
1701 outgoing.missing)
1705 outgoing.missing)
1702 cheads.extend(c.node() for c in revset)
1706 cheads.extend(c.node() for c in revset)
1703 # even when we don't push, exchanging phase data is useful
1707 # even when we don't push, exchanging phase data is useful
1704 remotephases = remote.listkeys('phases')
1708 remotephases = remote.listkeys('phases')
1705 if not remotephases: # old server or public only repo
1709 if not remotephases: # old server or public only repo
1706 phases.advanceboundary(self, phases.public, cheads)
1710 phases.advanceboundary(self, phases.public, cheads)
1707 # don't push any phase data as there is nothing to push
1711 # don't push any phase data as there is nothing to push
1708 else:
1712 else:
1709 ana = phases.analyzeremotephases(self, cheads, remotephases)
1713 ana = phases.analyzeremotephases(self, cheads, remotephases)
1710 pheads, droots = ana
1714 pheads, droots = ana
1711 ### Apply remote phase on local
1715 ### Apply remote phase on local
1712 if remotephases.get('publishing', False):
1716 if remotephases.get('publishing', False):
1713 phases.advanceboundary(self, phases.public, cheads)
1717 phases.advanceboundary(self, phases.public, cheads)
1714 else: # publish = False
1718 else: # publish = False
1715 phases.advanceboundary(self, phases.public, pheads)
1719 phases.advanceboundary(self, phases.public, pheads)
1716 phases.advanceboundary(self, phases.draft, cheads)
1720 phases.advanceboundary(self, phases.draft, cheads)
1717 ### Apply local phase on remote
1721 ### Apply local phase on remote
1718
1722
1719 # Get the list of all revs draft on remote by public here.
1723 # Get the list of all revs draft on remote by public here.
1720 # XXX Beware that revset break if droots is not strictly
1724 # XXX Beware that revset break if droots is not strictly
1721 # XXX root we may want to ensure it is but it is costly
1725 # XXX root we may want to ensure it is but it is costly
1722 outdated = self.set('heads((%ln::%ln) and public())',
1726 outdated = self.set('heads((%ln::%ln) and public())',
1723 droots, cheads)
1727 droots, cheads)
1724 for newremotehead in outdated:
1728 for newremotehead in outdated:
1725 r = remote.pushkey('phases',
1729 r = remote.pushkey('phases',
1726 newremotehead.hex(),
1730 newremotehead.hex(),
1727 str(phases.draft),
1731 str(phases.draft),
1728 str(phases.public))
1732 str(phases.public))
1729 if not r:
1733 if not r:
1730 self.ui.warn(_('updating %s to public failed!\n')
1734 self.ui.warn(_('updating %s to public failed!\n')
1731 % newremotehead)
1735 % newremotehead)
1732 finally:
1736 finally:
1733 if lock is not None:
1737 if lock is not None:
1734 lock.release()
1738 lock.release()
1735 finally:
1739 finally:
1736 locallock.release()
1740 locallock.release()
1737
1741
1738 self.ui.debug("checking for updated bookmarks\n")
1742 self.ui.debug("checking for updated bookmarks\n")
1739 rb = remote.listkeys('bookmarks')
1743 rb = remote.listkeys('bookmarks')
1740 for k in rb.keys():
1744 for k in rb.keys():
1741 if k in self._bookmarks:
1745 if k in self._bookmarks:
1742 nr, nl = rb[k], hex(self._bookmarks[k])
1746 nr, nl = rb[k], hex(self._bookmarks[k])
1743 if nr in self:
1747 if nr in self:
1744 cr = self[nr]
1748 cr = self[nr]
1745 cl = self[nl]
1749 cl = self[nl]
1746 if cl in cr.descendants():
1750 if cl in cr.descendants():
1747 r = remote.pushkey('bookmarks', k, nr, nl)
1751 r = remote.pushkey('bookmarks', k, nr, nl)
1748 if r:
1752 if r:
1749 self.ui.status(_("updating bookmark %s\n") % k)
1753 self.ui.status(_("updating bookmark %s\n") % k)
1750 else:
1754 else:
1751 self.ui.warn(_('updating bookmark %s'
1755 self.ui.warn(_('updating bookmark %s'
1752 ' failed!\n') % k)
1756 ' failed!\n') % k)
1753
1757
1754 return ret
1758 return ret
1755
1759
1756 def changegroupinfo(self, nodes, source):
1760 def changegroupinfo(self, nodes, source):
1757 if self.ui.verbose or source == 'bundle':
1761 if self.ui.verbose or source == 'bundle':
1758 self.ui.status(_("%d changesets found\n") % len(nodes))
1762 self.ui.status(_("%d changesets found\n") % len(nodes))
1759 if self.ui.debugflag:
1763 if self.ui.debugflag:
1760 self.ui.debug("list of changesets:\n")
1764 self.ui.debug("list of changesets:\n")
1761 for node in nodes:
1765 for node in nodes:
1762 self.ui.debug("%s\n" % hex(node))
1766 self.ui.debug("%s\n" % hex(node))
1763
1767
1764 def changegroupsubset(self, bases, heads, source):
1768 def changegroupsubset(self, bases, heads, source):
1765 """Compute a changegroup consisting of all the nodes that are
1769 """Compute a changegroup consisting of all the nodes that are
1766 descendants of any of the bases and ancestors of any of the heads.
1770 descendants of any of the bases and ancestors of any of the heads.
1767 Return a chunkbuffer object whose read() method will return
1771 Return a chunkbuffer object whose read() method will return
1768 successive changegroup chunks.
1772 successive changegroup chunks.
1769
1773
1770 It is fairly complex as determining which filenodes and which
1774 It is fairly complex as determining which filenodes and which
1771 manifest nodes need to be included for the changeset to be complete
1775 manifest nodes need to be included for the changeset to be complete
1772 is non-trivial.
1776 is non-trivial.
1773
1777
1774 Another wrinkle is doing the reverse, figuring out which changeset in
1778 Another wrinkle is doing the reverse, figuring out which changeset in
1775 the changegroup a particular filenode or manifestnode belongs to.
1779 the changegroup a particular filenode or manifestnode belongs to.
1776 """
1780 """
1777 cl = self.changelog
1781 cl = self.changelog
1778 if not bases:
1782 if not bases:
1779 bases = [nullid]
1783 bases = [nullid]
1780 csets, bases, heads = cl.nodesbetween(bases, heads)
1784 csets, bases, heads = cl.nodesbetween(bases, heads)
1781 # We assume that all ancestors of bases are known
1785 # We assume that all ancestors of bases are known
1782 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1786 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1783 return self._changegroupsubset(common, csets, heads, source)
1787 return self._changegroupsubset(common, csets, heads, source)
1784
1788
1785 def getlocalbundle(self, source, outgoing):
1789 def getlocalbundle(self, source, outgoing):
1786 """Like getbundle, but taking a discovery.outgoing as an argument.
1790 """Like getbundle, but taking a discovery.outgoing as an argument.
1787
1791
1788 This is only implemented for local repos and reuses potentially
1792 This is only implemented for local repos and reuses potentially
1789 precomputed sets in outgoing."""
1793 precomputed sets in outgoing."""
1790 if not outgoing.missing:
1794 if not outgoing.missing:
1791 return None
1795 return None
1792 return self._changegroupsubset(outgoing.common,
1796 return self._changegroupsubset(outgoing.common,
1793 outgoing.missing,
1797 outgoing.missing,
1794 outgoing.missingheads,
1798 outgoing.missingheads,
1795 source)
1799 source)
1796
1800
1797 def getbundle(self, source, heads=None, common=None):
1801 def getbundle(self, source, heads=None, common=None):
1798 """Like changegroupsubset, but returns the set difference between the
1802 """Like changegroupsubset, but returns the set difference between the
1799 ancestors of heads and the ancestors common.
1803 ancestors of heads and the ancestors common.
1800
1804
1801 If heads is None, use the local heads. If common is None, use [nullid].
1805 If heads is None, use the local heads. If common is None, use [nullid].
1802
1806
1803 The nodes in common might not all be known locally due to the way the
1807 The nodes in common might not all be known locally due to the way the
1804 current discovery protocol works.
1808 current discovery protocol works.
1805 """
1809 """
1806 cl = self.changelog
1810 cl = self.changelog
1807 if common:
1811 if common:
1808 nm = cl.nodemap
1812 nm = cl.nodemap
1809 common = [n for n in common if n in nm]
1813 common = [n for n in common if n in nm]
1810 else:
1814 else:
1811 common = [nullid]
1815 common = [nullid]
1812 if not heads:
1816 if not heads:
1813 heads = cl.heads()
1817 heads = cl.heads()
1814 return self.getlocalbundle(source,
1818 return self.getlocalbundle(source,
1815 discovery.outgoing(cl, common, heads))
1819 discovery.outgoing(cl, common, heads))
1816
1820
1817 def _changegroupsubset(self, commonrevs, csets, heads, source):
1821 def _changegroupsubset(self, commonrevs, csets, heads, source):
1818
1822
1819 cl = self.changelog
1823 cl = self.changelog
1820 mf = self.manifest
1824 mf = self.manifest
1821 mfs = {} # needed manifests
1825 mfs = {} # needed manifests
1822 fnodes = {} # needed file nodes
1826 fnodes = {} # needed file nodes
1823 changedfiles = set()
1827 changedfiles = set()
1824 fstate = ['', {}]
1828 fstate = ['', {}]
1825 count = [0, 0]
1829 count = [0, 0]
1826
1830
1827 # can we go through the fast path ?
1831 # can we go through the fast path ?
1828 heads.sort()
1832 heads.sort()
1829 if heads == sorted(self.heads()):
1833 if heads == sorted(self.heads()):
1830 return self._changegroup(csets, source)
1834 return self._changegroup(csets, source)
1831
1835
1832 # slow path
1836 # slow path
1833 self.hook('preoutgoing', throw=True, source=source)
1837 self.hook('preoutgoing', throw=True, source=source)
1834 self.changegroupinfo(csets, source)
1838 self.changegroupinfo(csets, source)
1835
1839
1836 # filter any nodes that claim to be part of the known set
1840 # filter any nodes that claim to be part of the known set
1837 def prune(revlog, missing):
1841 def prune(revlog, missing):
1838 rr, rl = revlog.rev, revlog.linkrev
1842 rr, rl = revlog.rev, revlog.linkrev
1839 return [n for n in missing
1843 return [n for n in missing
1840 if rl(rr(n)) not in commonrevs]
1844 if rl(rr(n)) not in commonrevs]
1841
1845
1842 progress = self.ui.progress
1846 progress = self.ui.progress
1843 _bundling = _('bundling')
1847 _bundling = _('bundling')
1844 _changesets = _('changesets')
1848 _changesets = _('changesets')
1845 _manifests = _('manifests')
1849 _manifests = _('manifests')
1846 _files = _('files')
1850 _files = _('files')
1847
1851
1848 def lookup(revlog, x):
1852 def lookup(revlog, x):
1849 if revlog == cl:
1853 if revlog == cl:
1850 c = cl.read(x)
1854 c = cl.read(x)
1851 changedfiles.update(c[3])
1855 changedfiles.update(c[3])
1852 mfs.setdefault(c[0], x)
1856 mfs.setdefault(c[0], x)
1853 count[0] += 1
1857 count[0] += 1
1854 progress(_bundling, count[0],
1858 progress(_bundling, count[0],
1855 unit=_changesets, total=count[1])
1859 unit=_changesets, total=count[1])
1856 return x
1860 return x
1857 elif revlog == mf:
1861 elif revlog == mf:
1858 clnode = mfs[x]
1862 clnode = mfs[x]
1859 mdata = mf.readfast(x)
1863 mdata = mf.readfast(x)
1860 for f, n in mdata.iteritems():
1864 for f, n in mdata.iteritems():
1861 if f in changedfiles:
1865 if f in changedfiles:
1862 fnodes[f].setdefault(n, clnode)
1866 fnodes[f].setdefault(n, clnode)
1863 count[0] += 1
1867 count[0] += 1
1864 progress(_bundling, count[0],
1868 progress(_bundling, count[0],
1865 unit=_manifests, total=count[1])
1869 unit=_manifests, total=count[1])
1866 return clnode
1870 return clnode
1867 else:
1871 else:
1868 progress(_bundling, count[0], item=fstate[0],
1872 progress(_bundling, count[0], item=fstate[0],
1869 unit=_files, total=count[1])
1873 unit=_files, total=count[1])
1870 return fstate[1][x]
1874 return fstate[1][x]
1871
1875
1872 bundler = changegroup.bundle10(lookup)
1876 bundler = changegroup.bundle10(lookup)
1873 reorder = self.ui.config('bundle', 'reorder', 'auto')
1877 reorder = self.ui.config('bundle', 'reorder', 'auto')
1874 if reorder == 'auto':
1878 if reorder == 'auto':
1875 reorder = None
1879 reorder = None
1876 else:
1880 else:
1877 reorder = util.parsebool(reorder)
1881 reorder = util.parsebool(reorder)
1878
1882
1879 def gengroup():
1883 def gengroup():
1880 # Create a changenode group generator that will call our functions
1884 # Create a changenode group generator that will call our functions
1881 # back to lookup the owning changenode and collect information.
1885 # back to lookup the owning changenode and collect information.
1882 count[:] = [0, len(csets)]
1886 count[:] = [0, len(csets)]
1883 for chunk in cl.group(csets, bundler, reorder=reorder):
1887 for chunk in cl.group(csets, bundler, reorder=reorder):
1884 yield chunk
1888 yield chunk
1885 progress(_bundling, None)
1889 progress(_bundling, None)
1886
1890
1887 # Create a generator for the manifestnodes that calls our lookup
1891 # Create a generator for the manifestnodes that calls our lookup
1888 # and data collection functions back.
1892 # and data collection functions back.
1889 for f in changedfiles:
1893 for f in changedfiles:
1890 fnodes[f] = {}
1894 fnodes[f] = {}
1891 count[:] = [0, len(mfs)]
1895 count[:] = [0, len(mfs)]
1892 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
1896 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
1893 yield chunk
1897 yield chunk
1894 progress(_bundling, None)
1898 progress(_bundling, None)
1895
1899
1896 mfs.clear()
1900 mfs.clear()
1897
1901
1898 # Go through all our files in order sorted by name.
1902 # Go through all our files in order sorted by name.
1899 count[:] = [0, len(changedfiles)]
1903 count[:] = [0, len(changedfiles)]
1900 for fname in sorted(changedfiles):
1904 for fname in sorted(changedfiles):
1901 filerevlog = self.file(fname)
1905 filerevlog = self.file(fname)
1902 if not len(filerevlog):
1906 if not len(filerevlog):
1903 raise util.Abort(_("empty or missing revlog for %s") % fname)
1907 raise util.Abort(_("empty or missing revlog for %s") % fname)
1904 fstate[0] = fname
1908 fstate[0] = fname
1905 fstate[1] = fnodes.pop(fname, {})
1909 fstate[1] = fnodes.pop(fname, {})
1906
1910
1907 nodelist = prune(filerevlog, fstate[1])
1911 nodelist = prune(filerevlog, fstate[1])
1908 if nodelist:
1912 if nodelist:
1909 count[0] += 1
1913 count[0] += 1
1910 yield bundler.fileheader(fname)
1914 yield bundler.fileheader(fname)
1911 for chunk in filerevlog.group(nodelist, bundler, reorder):
1915 for chunk in filerevlog.group(nodelist, bundler, reorder):
1912 yield chunk
1916 yield chunk
1913
1917
1914 # Signal that no more groups are left.
1918 # Signal that no more groups are left.
1915 yield bundler.close()
1919 yield bundler.close()
1916 progress(_bundling, None)
1920 progress(_bundling, None)
1917
1921
1918 if csets:
1922 if csets:
1919 self.hook('outgoing', node=hex(csets[0]), source=source)
1923 self.hook('outgoing', node=hex(csets[0]), source=source)
1920
1924
1921 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1925 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1922
1926
1923 def changegroup(self, basenodes, source):
1927 def changegroup(self, basenodes, source):
1924 # to avoid a race we use changegroupsubset() (issue1320)
1928 # to avoid a race we use changegroupsubset() (issue1320)
1925 return self.changegroupsubset(basenodes, self.heads(), source)
1929 return self.changegroupsubset(basenodes, self.heads(), source)
1926
1930
1927 def _changegroup(self, nodes, source):
1931 def _changegroup(self, nodes, source):
1928 """Compute the changegroup of all nodes that we have that a recipient
1932 """Compute the changegroup of all nodes that we have that a recipient
1929 doesn't. Return a chunkbuffer object whose read() method will return
1933 doesn't. Return a chunkbuffer object whose read() method will return
1930 successive changegroup chunks.
1934 successive changegroup chunks.
1931
1935
1932 This is much easier than the previous function as we can assume that
1936 This is much easier than the previous function as we can assume that
1933 the recipient has any changenode we aren't sending them.
1937 the recipient has any changenode we aren't sending them.
1934
1938
1935 nodes is the set of nodes to send"""
1939 nodes is the set of nodes to send"""
1936
1940
1937 cl = self.changelog
1941 cl = self.changelog
1938 mf = self.manifest
1942 mf = self.manifest
1939 mfs = {}
1943 mfs = {}
1940 changedfiles = set()
1944 changedfiles = set()
1941 fstate = ['']
1945 fstate = ['']
1942 count = [0, 0]
1946 count = [0, 0]
1943
1947
1944 self.hook('preoutgoing', throw=True, source=source)
1948 self.hook('preoutgoing', throw=True, source=source)
1945 self.changegroupinfo(nodes, source)
1949 self.changegroupinfo(nodes, source)
1946
1950
1947 revset = set([cl.rev(n) for n in nodes])
1951 revset = set([cl.rev(n) for n in nodes])
1948
1952
1949 def gennodelst(log):
1953 def gennodelst(log):
1950 ln, llr = log.node, log.linkrev
1954 ln, llr = log.node, log.linkrev
1951 return [ln(r) for r in log if llr(r) in revset]
1955 return [ln(r) for r in log if llr(r) in revset]
1952
1956
1953 progress = self.ui.progress
1957 progress = self.ui.progress
1954 _bundling = _('bundling')
1958 _bundling = _('bundling')
1955 _changesets = _('changesets')
1959 _changesets = _('changesets')
1956 _manifests = _('manifests')
1960 _manifests = _('manifests')
1957 _files = _('files')
1961 _files = _('files')
1958
1962
1959 def lookup(revlog, x):
1963 def lookup(revlog, x):
1960 if revlog == cl:
1964 if revlog == cl:
1961 c = cl.read(x)
1965 c = cl.read(x)
1962 changedfiles.update(c[3])
1966 changedfiles.update(c[3])
1963 mfs.setdefault(c[0], x)
1967 mfs.setdefault(c[0], x)
1964 count[0] += 1
1968 count[0] += 1
1965 progress(_bundling, count[0],
1969 progress(_bundling, count[0],
1966 unit=_changesets, total=count[1])
1970 unit=_changesets, total=count[1])
1967 return x
1971 return x
1968 elif revlog == mf:
1972 elif revlog == mf:
1969 count[0] += 1
1973 count[0] += 1
1970 progress(_bundling, count[0],
1974 progress(_bundling, count[0],
1971 unit=_manifests, total=count[1])
1975 unit=_manifests, total=count[1])
1972 return cl.node(revlog.linkrev(revlog.rev(x)))
1976 return cl.node(revlog.linkrev(revlog.rev(x)))
1973 else:
1977 else:
1974 progress(_bundling, count[0], item=fstate[0],
1978 progress(_bundling, count[0], item=fstate[0],
1975 total=count[1], unit=_files)
1979 total=count[1], unit=_files)
1976 return cl.node(revlog.linkrev(revlog.rev(x)))
1980 return cl.node(revlog.linkrev(revlog.rev(x)))
1977
1981
1978 bundler = changegroup.bundle10(lookup)
1982 bundler = changegroup.bundle10(lookup)
1979 reorder = self.ui.config('bundle', 'reorder', 'auto')
1983 reorder = self.ui.config('bundle', 'reorder', 'auto')
1980 if reorder == 'auto':
1984 if reorder == 'auto':
1981 reorder = None
1985 reorder = None
1982 else:
1986 else:
1983 reorder = util.parsebool(reorder)
1987 reorder = util.parsebool(reorder)
1984
1988
1985 def gengroup():
1989 def gengroup():
1986 '''yield a sequence of changegroup chunks (strings)'''
1990 '''yield a sequence of changegroup chunks (strings)'''
1987 # construct a list of all changed files
1991 # construct a list of all changed files
1988
1992
1989 count[:] = [0, len(nodes)]
1993 count[:] = [0, len(nodes)]
1990 for chunk in cl.group(nodes, bundler, reorder=reorder):
1994 for chunk in cl.group(nodes, bundler, reorder=reorder):
1991 yield chunk
1995 yield chunk
1992 progress(_bundling, None)
1996 progress(_bundling, None)
1993
1997
1994 count[:] = [0, len(mfs)]
1998 count[:] = [0, len(mfs)]
1995 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
1999 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
1996 yield chunk
2000 yield chunk
1997 progress(_bundling, None)
2001 progress(_bundling, None)
1998
2002
1999 count[:] = [0, len(changedfiles)]
2003 count[:] = [0, len(changedfiles)]
2000 for fname in sorted(changedfiles):
2004 for fname in sorted(changedfiles):
2001 filerevlog = self.file(fname)
2005 filerevlog = self.file(fname)
2002 if not len(filerevlog):
2006 if not len(filerevlog):
2003 raise util.Abort(_("empty or missing revlog for %s") % fname)
2007 raise util.Abort(_("empty or missing revlog for %s") % fname)
2004 fstate[0] = fname
2008 fstate[0] = fname
2005 nodelist = gennodelst(filerevlog)
2009 nodelist = gennodelst(filerevlog)
2006 if nodelist:
2010 if nodelist:
2007 count[0] += 1
2011 count[0] += 1
2008 yield bundler.fileheader(fname)
2012 yield bundler.fileheader(fname)
2009 for chunk in filerevlog.group(nodelist, bundler, reorder):
2013 for chunk in filerevlog.group(nodelist, bundler, reorder):
2010 yield chunk
2014 yield chunk
2011 yield bundler.close()
2015 yield bundler.close()
2012 progress(_bundling, None)
2016 progress(_bundling, None)
2013
2017
2014 if nodes:
2018 if nodes:
2015 self.hook('outgoing', node=hex(nodes[0]), source=source)
2019 self.hook('outgoing', node=hex(nodes[0]), source=source)
2016
2020
2017 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2021 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2018
2022
2019 def addchangegroup(self, source, srctype, url, emptyok=False):
2023 def addchangegroup(self, source, srctype, url, emptyok=False):
2020 """Add the changegroup returned by source.read() to this repo.
2024 """Add the changegroup returned by source.read() to this repo.
2021 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2025 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2022 the URL of the repo where this changegroup is coming from.
2026 the URL of the repo where this changegroup is coming from.
2023
2027
2024 Return an integer summarizing the change to this repo:
2028 Return an integer summarizing the change to this repo:
2025 - nothing changed or no source: 0
2029 - nothing changed or no source: 0
2026 - more heads than before: 1+added heads (2..n)
2030 - more heads than before: 1+added heads (2..n)
2027 - fewer heads than before: -1-removed heads (-2..-n)
2031 - fewer heads than before: -1-removed heads (-2..-n)
2028 - number of heads stays the same: 1
2032 - number of heads stays the same: 1
2029 """
2033 """
2030 def csmap(x):
2034 def csmap(x):
2031 self.ui.debug("add changeset %s\n" % short(x))
2035 self.ui.debug("add changeset %s\n" % short(x))
2032 return len(cl)
2036 return len(cl)
2033
2037
2034 def revmap(x):
2038 def revmap(x):
2035 return cl.rev(x)
2039 return cl.rev(x)
2036
2040
2037 if not source:
2041 if not source:
2038 return 0
2042 return 0
2039
2043
2040 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2044 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2041
2045
2042 changesets = files = revisions = 0
2046 changesets = files = revisions = 0
2043 efiles = set()
2047 efiles = set()
2044
2048
2045 # write changelog data to temp files so concurrent readers will not see
2049 # write changelog data to temp files so concurrent readers will not see
2046 # inconsistent view
2050 # inconsistent view
2047 cl = self.changelog
2051 cl = self.changelog
2048 cl.delayupdate()
2052 cl.delayupdate()
2049 oldheads = cl.heads()
2053 oldheads = cl.heads()
2050
2054
2051 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2055 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2052 try:
2056 try:
2053 trp = weakref.proxy(tr)
2057 trp = weakref.proxy(tr)
2054 # pull off the changeset group
2058 # pull off the changeset group
2055 self.ui.status(_("adding changesets\n"))
2059 self.ui.status(_("adding changesets\n"))
2056 clstart = len(cl)
2060 clstart = len(cl)
2057 class prog(object):
2061 class prog(object):
2058 step = _('changesets')
2062 step = _('changesets')
2059 count = 1
2063 count = 1
2060 ui = self.ui
2064 ui = self.ui
2061 total = None
2065 total = None
2062 def __call__(self):
2066 def __call__(self):
2063 self.ui.progress(self.step, self.count, unit=_('chunks'),
2067 self.ui.progress(self.step, self.count, unit=_('chunks'),
2064 total=self.total)
2068 total=self.total)
2065 self.count += 1
2069 self.count += 1
2066 pr = prog()
2070 pr = prog()
2067 source.callback = pr
2071 source.callback = pr
2068
2072
2069 source.changelogheader()
2073 source.changelogheader()
2070 srccontent = cl.addgroup(source, csmap, trp)
2074 srccontent = cl.addgroup(source, csmap, trp)
2071 if not (srccontent or emptyok):
2075 if not (srccontent or emptyok):
2072 raise util.Abort(_("received changelog group is empty"))
2076 raise util.Abort(_("received changelog group is empty"))
2073 clend = len(cl)
2077 clend = len(cl)
2074 changesets = clend - clstart
2078 changesets = clend - clstart
2075 for c in xrange(clstart, clend):
2079 for c in xrange(clstart, clend):
2076 efiles.update(self[c].files())
2080 efiles.update(self[c].files())
2077 efiles = len(efiles)
2081 efiles = len(efiles)
2078 self.ui.progress(_('changesets'), None)
2082 self.ui.progress(_('changesets'), None)
2079
2083
2080 # pull off the manifest group
2084 # pull off the manifest group
2081 self.ui.status(_("adding manifests\n"))
2085 self.ui.status(_("adding manifests\n"))
2082 pr.step = _('manifests')
2086 pr.step = _('manifests')
2083 pr.count = 1
2087 pr.count = 1
2084 pr.total = changesets # manifests <= changesets
2088 pr.total = changesets # manifests <= changesets
2085 # no need to check for empty manifest group here:
2089 # no need to check for empty manifest group here:
2086 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2090 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2087 # no new manifest will be created and the manifest group will
2091 # no new manifest will be created and the manifest group will
2088 # be empty during the pull
2092 # be empty during the pull
2089 source.manifestheader()
2093 source.manifestheader()
2090 self.manifest.addgroup(source, revmap, trp)
2094 self.manifest.addgroup(source, revmap, trp)
2091 self.ui.progress(_('manifests'), None)
2095 self.ui.progress(_('manifests'), None)
2092
2096
2093 needfiles = {}
2097 needfiles = {}
2094 if self.ui.configbool('server', 'validate', default=False):
2098 if self.ui.configbool('server', 'validate', default=False):
2095 # validate incoming csets have their manifests
2099 # validate incoming csets have their manifests
2096 for cset in xrange(clstart, clend):
2100 for cset in xrange(clstart, clend):
2097 mfest = self.changelog.read(self.changelog.node(cset))[0]
2101 mfest = self.changelog.read(self.changelog.node(cset))[0]
2098 mfest = self.manifest.readdelta(mfest)
2102 mfest = self.manifest.readdelta(mfest)
2099 # store file nodes we must see
2103 # store file nodes we must see
2100 for f, n in mfest.iteritems():
2104 for f, n in mfest.iteritems():
2101 needfiles.setdefault(f, set()).add(n)
2105 needfiles.setdefault(f, set()).add(n)
2102
2106
2103 # process the files
2107 # process the files
2104 self.ui.status(_("adding file changes\n"))
2108 self.ui.status(_("adding file changes\n"))
2105 pr.step = _('files')
2109 pr.step = _('files')
2106 pr.count = 1
2110 pr.count = 1
2107 pr.total = efiles
2111 pr.total = efiles
2108 source.callback = None
2112 source.callback = None
2109
2113
2110 while True:
2114 while True:
2111 chunkdata = source.filelogheader()
2115 chunkdata = source.filelogheader()
2112 if not chunkdata:
2116 if not chunkdata:
2113 break
2117 break
2114 f = chunkdata["filename"]
2118 f = chunkdata["filename"]
2115 self.ui.debug("adding %s revisions\n" % f)
2119 self.ui.debug("adding %s revisions\n" % f)
2116 pr()
2120 pr()
2117 fl = self.file(f)
2121 fl = self.file(f)
2118 o = len(fl)
2122 o = len(fl)
2119 if not fl.addgroup(source, revmap, trp):
2123 if not fl.addgroup(source, revmap, trp):
2120 raise util.Abort(_("received file revlog group is empty"))
2124 raise util.Abort(_("received file revlog group is empty"))
2121 revisions += len(fl) - o
2125 revisions += len(fl) - o
2122 files += 1
2126 files += 1
2123 if f in needfiles:
2127 if f in needfiles:
2124 needs = needfiles[f]
2128 needs = needfiles[f]
2125 for new in xrange(o, len(fl)):
2129 for new in xrange(o, len(fl)):
2126 n = fl.node(new)
2130 n = fl.node(new)
2127 if n in needs:
2131 if n in needs:
2128 needs.remove(n)
2132 needs.remove(n)
2129 if not needs:
2133 if not needs:
2130 del needfiles[f]
2134 del needfiles[f]
2131 self.ui.progress(_('files'), None)
2135 self.ui.progress(_('files'), None)
2132
2136
2133 for f, needs in needfiles.iteritems():
2137 for f, needs in needfiles.iteritems():
2134 fl = self.file(f)
2138 fl = self.file(f)
2135 for n in needs:
2139 for n in needs:
2136 try:
2140 try:
2137 fl.rev(n)
2141 fl.rev(n)
2138 except error.LookupError:
2142 except error.LookupError:
2139 raise util.Abort(
2143 raise util.Abort(
2140 _('missing file data for %s:%s - run hg verify') %
2144 _('missing file data for %s:%s - run hg verify') %
2141 (f, hex(n)))
2145 (f, hex(n)))
2142
2146
2143 dh = 0
2147 dh = 0
2144 if oldheads:
2148 if oldheads:
2145 heads = cl.heads()
2149 heads = cl.heads()
2146 dh = len(heads) - len(oldheads)
2150 dh = len(heads) - len(oldheads)
2147 for h in heads:
2151 for h in heads:
2148 if h not in oldheads and 'close' in self[h].extra():
2152 if h not in oldheads and 'close' in self[h].extra():
2149 dh -= 1
2153 dh -= 1
2150 htext = ""
2154 htext = ""
2151 if dh:
2155 if dh:
2152 htext = _(" (%+d heads)") % dh
2156 htext = _(" (%+d heads)") % dh
2153
2157
2154 self.ui.status(_("added %d changesets"
2158 self.ui.status(_("added %d changesets"
2155 " with %d changes to %d files%s\n")
2159 " with %d changes to %d files%s\n")
2156 % (changesets, revisions, files, htext))
2160 % (changesets, revisions, files, htext))
2157
2161
2158 if changesets > 0:
2162 if changesets > 0:
2159 p = lambda: cl.writepending() and self.root or ""
2163 p = lambda: cl.writepending() and self.root or ""
2160 self.hook('pretxnchangegroup', throw=True,
2164 self.hook('pretxnchangegroup', throw=True,
2161 node=hex(cl.node(clstart)), source=srctype,
2165 node=hex(cl.node(clstart)), source=srctype,
2162 url=url, pending=p)
2166 url=url, pending=p)
2163
2167
2164 added = [cl.node(r) for r in xrange(clstart, clend)]
2168 added = [cl.node(r) for r in xrange(clstart, clend)]
2165 publishing = self.ui.configbool('phases', 'publish', True)
2169 publishing = self.ui.configbool('phases', 'publish', True)
2166 if srctype == 'push':
2170 if srctype == 'push':
2167 # Old server can not push the boundary themself.
2171 # Old server can not push the boundary themself.
2168 # New server won't push the boundary if changeset already
2172 # New server won't push the boundary if changeset already
2169 # existed locally as secrete
2173 # existed locally as secrete
2170 #
2174 #
2171 # We should not use added here but the list of all change in
2175 # We should not use added here but the list of all change in
2172 # the bundle
2176 # the bundle
2173 if publishing:
2177 if publishing:
2174 phases.advanceboundary(self, phases.public, srccontent)
2178 phases.advanceboundary(self, phases.public, srccontent)
2175 else:
2179 else:
2176 phases.advanceboundary(self, phases.draft, srccontent)
2180 phases.advanceboundary(self, phases.draft, srccontent)
2177 phases.retractboundary(self, phases.draft, added)
2181 phases.retractboundary(self, phases.draft, added)
2178 elif srctype != 'strip':
2182 elif srctype != 'strip':
2179 # publishing only alter behavior during push
2183 # publishing only alter behavior during push
2180 #
2184 #
2181 # strip should not touch boundary at all
2185 # strip should not touch boundary at all
2182 phases.retractboundary(self, phases.draft, added)
2186 phases.retractboundary(self, phases.draft, added)
2183
2187
2184 # make changelog see real files again
2188 # make changelog see real files again
2185 cl.finalize(trp)
2189 cl.finalize(trp)
2186
2190
2187 tr.close()
2191 tr.close()
2188
2192
2189 if changesets > 0:
2193 if changesets > 0:
2190 def runhooks():
2194 def runhooks():
2191 # forcefully update the on-disk branch cache
2195 # forcefully update the on-disk branch cache
2192 self.ui.debug("updating the branch cache\n")
2196 self.ui.debug("updating the branch cache\n")
2193 self.updatebranchcache()
2197 self.updatebranchcache()
2194 self.hook("changegroup", node=hex(cl.node(clstart)),
2198 self.hook("changegroup", node=hex(cl.node(clstart)),
2195 source=srctype, url=url)
2199 source=srctype, url=url)
2196
2200
2197 for n in added:
2201 for n in added:
2198 self.hook("incoming", node=hex(n), source=srctype,
2202 self.hook("incoming", node=hex(n), source=srctype,
2199 url=url)
2203 url=url)
2200 self._afterlock(runhooks)
2204 self._afterlock(runhooks)
2201
2205
2202 finally:
2206 finally:
2203 tr.release()
2207 tr.release()
2204 # never return 0 here:
2208 # never return 0 here:
2205 if dh < 0:
2209 if dh < 0:
2206 return dh - 1
2210 return dh - 1
2207 else:
2211 else:
2208 return dh + 1
2212 return dh + 1
2209
2213
2210 def stream_in(self, remote, requirements):
2214 def stream_in(self, remote, requirements):
2211 lock = self.lock()
2215 lock = self.lock()
2212 try:
2216 try:
2213 fp = remote.stream_out()
2217 fp = remote.stream_out()
2214 l = fp.readline()
2218 l = fp.readline()
2215 try:
2219 try:
2216 resp = int(l)
2220 resp = int(l)
2217 except ValueError:
2221 except ValueError:
2218 raise error.ResponseError(
2222 raise error.ResponseError(
2219 _('Unexpected response from remote server:'), l)
2223 _('Unexpected response from remote server:'), l)
2220 if resp == 1:
2224 if resp == 1:
2221 raise util.Abort(_('operation forbidden by server'))
2225 raise util.Abort(_('operation forbidden by server'))
2222 elif resp == 2:
2226 elif resp == 2:
2223 raise util.Abort(_('locking the remote repository failed'))
2227 raise util.Abort(_('locking the remote repository failed'))
2224 elif resp != 0:
2228 elif resp != 0:
2225 raise util.Abort(_('the server sent an unknown error code'))
2229 raise util.Abort(_('the server sent an unknown error code'))
2226 self.ui.status(_('streaming all changes\n'))
2230 self.ui.status(_('streaming all changes\n'))
2227 l = fp.readline()
2231 l = fp.readline()
2228 try:
2232 try:
2229 total_files, total_bytes = map(int, l.split(' ', 1))
2233 total_files, total_bytes = map(int, l.split(' ', 1))
2230 except (ValueError, TypeError):
2234 except (ValueError, TypeError):
2231 raise error.ResponseError(
2235 raise error.ResponseError(
2232 _('Unexpected response from remote server:'), l)
2236 _('Unexpected response from remote server:'), l)
2233 self.ui.status(_('%d files to transfer, %s of data\n') %
2237 self.ui.status(_('%d files to transfer, %s of data\n') %
2234 (total_files, util.bytecount(total_bytes)))
2238 (total_files, util.bytecount(total_bytes)))
2235 start = time.time()
2239 start = time.time()
2236 for i in xrange(total_files):
2240 for i in xrange(total_files):
2237 # XXX doesn't support '\n' or '\r' in filenames
2241 # XXX doesn't support '\n' or '\r' in filenames
2238 l = fp.readline()
2242 l = fp.readline()
2239 try:
2243 try:
2240 name, size = l.split('\0', 1)
2244 name, size = l.split('\0', 1)
2241 size = int(size)
2245 size = int(size)
2242 except (ValueError, TypeError):
2246 except (ValueError, TypeError):
2243 raise error.ResponseError(
2247 raise error.ResponseError(
2244 _('Unexpected response from remote server:'), l)
2248 _('Unexpected response from remote server:'), l)
2245 if self.ui.debugflag:
2249 if self.ui.debugflag:
2246 self.ui.debug('adding %s (%s)\n' %
2250 self.ui.debug('adding %s (%s)\n' %
2247 (name, util.bytecount(size)))
2251 (name, util.bytecount(size)))
2248 # for backwards compat, name was partially encoded
2252 # for backwards compat, name was partially encoded
2249 ofp = self.sopener(store.decodedir(name), 'w')
2253 ofp = self.sopener(store.decodedir(name), 'w')
2250 for chunk in util.filechunkiter(fp, limit=size):
2254 for chunk in util.filechunkiter(fp, limit=size):
2251 ofp.write(chunk)
2255 ofp.write(chunk)
2252 ofp.close()
2256 ofp.close()
2253 elapsed = time.time() - start
2257 elapsed = time.time() - start
2254 if elapsed <= 0:
2258 if elapsed <= 0:
2255 elapsed = 0.001
2259 elapsed = 0.001
2256 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2260 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2257 (util.bytecount(total_bytes), elapsed,
2261 (util.bytecount(total_bytes), elapsed,
2258 util.bytecount(total_bytes / elapsed)))
2262 util.bytecount(total_bytes / elapsed)))
2259
2263
2260 # new requirements = old non-format requirements + new format-related
2264 # new requirements = old non-format requirements + new format-related
2261 # requirements from the streamed-in repository
2265 # requirements from the streamed-in repository
2262 requirements.update(set(self.requirements) - self.supportedformats)
2266 requirements.update(set(self.requirements) - self.supportedformats)
2263 self._applyrequirements(requirements)
2267 self._applyrequirements(requirements)
2264 self._writerequirements()
2268 self._writerequirements()
2265
2269
2266 self.invalidate()
2270 self.invalidate()
2267 return len(self.heads()) + 1
2271 return len(self.heads()) + 1
2268 finally:
2272 finally:
2269 lock.release()
2273 lock.release()
2270
2274
2271 def clone(self, remote, heads=[], stream=False):
2275 def clone(self, remote, heads=[], stream=False):
2272 '''clone remote repository.
2276 '''clone remote repository.
2273
2277
2274 keyword arguments:
2278 keyword arguments:
2275 heads: list of revs to clone (forces use of pull)
2279 heads: list of revs to clone (forces use of pull)
2276 stream: use streaming clone if possible'''
2280 stream: use streaming clone if possible'''
2277
2281
2278 # now, all clients that can request uncompressed clones can
2282 # now, all clients that can request uncompressed clones can
2279 # read repo formats supported by all servers that can serve
2283 # read repo formats supported by all servers that can serve
2280 # them.
2284 # them.
2281
2285
2282 # if revlog format changes, client will have to check version
2286 # if revlog format changes, client will have to check version
2283 # and format flags on "stream" capability, and use
2287 # and format flags on "stream" capability, and use
2284 # uncompressed only if compatible.
2288 # uncompressed only if compatible.
2285
2289
2286 if not stream:
2290 if not stream:
2287 # if the server explicitely prefer to stream (for fast LANs)
2291 # if the server explicitely prefer to stream (for fast LANs)
2288 stream = remote.capable('stream-preferred')
2292 stream = remote.capable('stream-preferred')
2289
2293
2290 if stream and not heads:
2294 if stream and not heads:
2291 # 'stream' means remote revlog format is revlogv1 only
2295 # 'stream' means remote revlog format is revlogv1 only
2292 if remote.capable('stream'):
2296 if remote.capable('stream'):
2293 return self.stream_in(remote, set(('revlogv1',)))
2297 return self.stream_in(remote, set(('revlogv1',)))
2294 # otherwise, 'streamreqs' contains the remote revlog format
2298 # otherwise, 'streamreqs' contains the remote revlog format
2295 streamreqs = remote.capable('streamreqs')
2299 streamreqs = remote.capable('streamreqs')
2296 if streamreqs:
2300 if streamreqs:
2297 streamreqs = set(streamreqs.split(','))
2301 streamreqs = set(streamreqs.split(','))
2298 # if we support it, stream in and adjust our requirements
2302 # if we support it, stream in and adjust our requirements
2299 if not streamreqs - self.supportedformats:
2303 if not streamreqs - self.supportedformats:
2300 return self.stream_in(remote, streamreqs)
2304 return self.stream_in(remote, streamreqs)
2301 return self.pull(remote, heads)
2305 return self.pull(remote, heads)
2302
2306
2303 def pushkey(self, namespace, key, old, new):
2307 def pushkey(self, namespace, key, old, new):
2304 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2308 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2305 old=old, new=new)
2309 old=old, new=new)
2306 ret = pushkey.push(self, namespace, key, old, new)
2310 ret = pushkey.push(self, namespace, key, old, new)
2307 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2311 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2308 ret=ret)
2312 ret=ret)
2309 return ret
2313 return ret
2310
2314
2311 def listkeys(self, namespace):
2315 def listkeys(self, namespace):
2312 self.hook('prelistkeys', throw=True, namespace=namespace)
2316 self.hook('prelistkeys', throw=True, namespace=namespace)
2313 values = pushkey.list(self, namespace)
2317 values = pushkey.list(self, namespace)
2314 self.hook('listkeys', namespace=namespace, values=values)
2318 self.hook('listkeys', namespace=namespace, values=values)
2315 return values
2319 return values
2316
2320
2317 def debugwireargs(self, one, two, three=None, four=None, five=None):
2321 def debugwireargs(self, one, two, three=None, four=None, five=None):
2318 '''used to test argument passing over the wire'''
2322 '''used to test argument passing over the wire'''
2319 return "%s %s %s %s %s" % (one, two, three, four, five)
2323 return "%s %s %s %s %s" % (one, two, three, four, five)
2320
2324
2321 def savecommitmessage(self, text):
2325 def savecommitmessage(self, text):
2322 fp = self.opener('last-message.txt', 'wb')
2326 fp = self.opener('last-message.txt', 'wb')
2323 try:
2327 try:
2324 fp.write(text)
2328 fp.write(text)
2325 finally:
2329 finally:
2326 fp.close()
2330 fp.close()
2327 return self.pathto(fp.name[len(self.root)+1:])
2331 return self.pathto(fp.name[len(self.root)+1:])
2328
2332
2329 # used to avoid circular references so destructors work
2333 # used to avoid circular references so destructors work
2330 def aftertrans(files):
2334 def aftertrans(files):
2331 renamefiles = [tuple(t) for t in files]
2335 renamefiles = [tuple(t) for t in files]
2332 def a():
2336 def a():
2333 for src, dest in renamefiles:
2337 for src, dest in renamefiles:
2334 try:
2338 try:
2335 util.rename(src, dest)
2339 util.rename(src, dest)
2336 except OSError: # journal file does not yet exist
2340 except OSError: # journal file does not yet exist
2337 pass
2341 pass
2338 return a
2342 return a
2339
2343
2340 def undoname(fn):
2344 def undoname(fn):
2341 base, name = os.path.split(fn)
2345 base, name = os.path.split(fn)
2342 assert name.startswith('journal')
2346 assert name.startswith('journal')
2343 return os.path.join(base, name.replace('journal', 'undo', 1))
2347 return os.path.join(base, name.replace('journal', 'undo', 1))
2344
2348
2345 def instance(ui, path, create):
2349 def instance(ui, path, create):
2346 return localrepository(ui, util.urllocalpath(path), create)
2350 return localrepository(ui, util.urllocalpath(path), create)
2347
2351
2348 def islocal(path):
2352 def islocal(path):
2349 return True
2353 return True
@@ -1,608 +1,608
1 $ "$TESTDIR/hghave" system-sh || exit 80
1 $ "$TESTDIR/hghave" system-sh || exit 80
2
2
3 commit hooks can see env vars
3 commit hooks can see env vars
4
4
5 $ hg init a
5 $ hg init a
6 $ cd a
6 $ cd a
7 $ echo "[hooks]" > .hg/hgrc
7 $ echo "[hooks]" > .hg/hgrc
8 $ echo 'commit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit' >> .hg/hgrc
8 $ echo 'commit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit' >> .hg/hgrc
9 $ echo 'commit.b = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit.b' >> .hg/hgrc
9 $ echo 'commit.b = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit.b' >> .hg/hgrc
10 $ echo 'precommit = unset HG_LOCAL HG_NODE HG_TAG; python "$TESTDIR"/printenv.py precommit' >> .hg/hgrc
10 $ echo 'precommit = unset HG_LOCAL HG_NODE HG_TAG; python "$TESTDIR"/printenv.py precommit' >> .hg/hgrc
11 $ echo 'pretxncommit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py pretxncommit' >> .hg/hgrc
11 $ echo 'pretxncommit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py pretxncommit' >> .hg/hgrc
12 $ echo 'pretxncommit.tip = hg -q tip' >> .hg/hgrc
12 $ echo 'pretxncommit.tip = hg -q tip' >> .hg/hgrc
13 $ echo 'pre-identify = python "$TESTDIR"/printenv.py pre-identify 1' >> .hg/hgrc
13 $ echo 'pre-identify = python "$TESTDIR"/printenv.py pre-identify 1' >> .hg/hgrc
14 $ echo 'pre-cat = python "$TESTDIR"/printenv.py pre-cat' >> .hg/hgrc
14 $ echo 'pre-cat = python "$TESTDIR"/printenv.py pre-cat' >> .hg/hgrc
15 $ echo 'post-cat = python "$TESTDIR"/printenv.py post-cat' >> .hg/hgrc
15 $ echo 'post-cat = python "$TESTDIR"/printenv.py post-cat' >> .hg/hgrc
16 $ echo a > a
16 $ echo a > a
17 $ hg add a
17 $ hg add a
18 $ hg commit -m a
18 $ hg commit -m a
19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
21 0:cb9a9f314b8b
21 0:cb9a9f314b8b
22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
24
24
25 $ hg clone . ../b
25 $ hg clone . ../b
26 updating to branch default
26 updating to branch default
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 $ cd ../b
28 $ cd ../b
29
29
30 changegroup hooks can see env vars
30 changegroup hooks can see env vars
31
31
32 $ echo '[hooks]' > .hg/hgrc
32 $ echo '[hooks]' > .hg/hgrc
33 $ echo 'prechangegroup = python "$TESTDIR"/printenv.py prechangegroup' >> .hg/hgrc
33 $ echo 'prechangegroup = python "$TESTDIR"/printenv.py prechangegroup' >> .hg/hgrc
34 $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc
34 $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc
35 $ echo 'incoming = python "$TESTDIR"/printenv.py incoming' >> .hg/hgrc
35 $ echo 'incoming = python "$TESTDIR"/printenv.py incoming' >> .hg/hgrc
36
36
37 pretxncommit and commit hooks can see both parents of merge
37 pretxncommit and commit hooks can see both parents of merge
38
38
39 $ cd ../a
39 $ cd ../a
40 $ echo b >> a
40 $ echo b >> a
41 $ hg commit -m a1 -d "1 0"
41 $ hg commit -m a1 -d "1 0"
42 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
42 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
43 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
43 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
44 1:ab228980c14d
44 1:ab228980c14d
45 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
45 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
46 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
46 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
47 $ hg update -C 0
47 $ hg update -C 0
48 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
48 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
49 $ echo b > b
49 $ echo b > b
50 $ hg add b
50 $ hg add b
51 $ hg commit -m b -d '1 0'
51 $ hg commit -m b -d '1 0'
52 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
52 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
53 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
53 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
54 2:ee9deb46ab31
54 2:ee9deb46ab31
55 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
55 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
56 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
56 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
57 created new head
57 created new head
58 $ hg merge 1
58 $ hg merge 1
59 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
59 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
60 (branch merge, don't forget to commit)
60 (branch merge, don't forget to commit)
61 $ hg commit -m merge -d '2 0'
61 $ hg commit -m merge -d '2 0'
62 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
62 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
63 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
63 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
64 3:07f3376c1e65
64 3:07f3376c1e65
65 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
65 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
66 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
66 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
67
67
68 test generic hooks
68 test generic hooks
69
69
70 $ hg id
70 $ hg id
71 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
71 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
72 warning: pre-identify hook exited with status 1
72 warning: pre-identify hook exited with status 1
73 [1]
73 [1]
74 $ hg cat b
74 $ hg cat b
75 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
75 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
76 b
76 b
77 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
77 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
78
78
79 $ cd ../b
79 $ cd ../b
80 $ hg pull ../a
80 $ hg pull ../a
81 pulling from ../a
81 pulling from ../a
82 searching for changes
82 searching for changes
83 prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
83 prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
84 adding changesets
84 adding changesets
85 adding manifests
85 adding manifests
86 adding file changes
86 adding file changes
87 added 3 changesets with 2 changes to 2 files
87 added 3 changesets with 2 changes to 2 files
88 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
88 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
89 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
89 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
90 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
90 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
91 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
91 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
92 (run 'hg update' to get a working copy)
92 (run 'hg update' to get a working copy)
93
93
94 tag hooks can see env vars
94 tag hooks can see env vars
95
95
96 $ cd ../a
96 $ cd ../a
97 $ echo 'pretag = python "$TESTDIR"/printenv.py pretag' >> .hg/hgrc
97 $ echo 'pretag = python "$TESTDIR"/printenv.py pretag' >> .hg/hgrc
98 $ echo 'tag = unset HG_PARENT1 HG_PARENT2; python "$TESTDIR"/printenv.py tag' >> .hg/hgrc
98 $ echo 'tag = unset HG_PARENT1 HG_PARENT2; python "$TESTDIR"/printenv.py tag' >> .hg/hgrc
99 $ hg tag -d '3 0' a
99 $ hg tag -d '3 0' a
100 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
100 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
101 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
101 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
102 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
102 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
103 4:539e4b31b6dc
103 4:539e4b31b6dc
104 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
104 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
105 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
105 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
106 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
106 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
107 $ hg tag -l la
107 $ hg tag -l la
108 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
108 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
109 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
109 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
110
110
111 pretag hook can forbid tagging
111 pretag hook can forbid tagging
112
112
113 $ echo 'pretag.forbid = python "$TESTDIR"/printenv.py pretag.forbid 1' >> .hg/hgrc
113 $ echo 'pretag.forbid = python "$TESTDIR"/printenv.py pretag.forbid 1' >> .hg/hgrc
114 $ hg tag -d '4 0' fa
114 $ hg tag -d '4 0' fa
115 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
115 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
116 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
116 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
117 abort: pretag.forbid hook exited with status 1
117 abort: pretag.forbid hook exited with status 1
118 [255]
118 [255]
119 $ hg tag -l fla
119 $ hg tag -l fla
120 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
120 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
121 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
121 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
122 abort: pretag.forbid hook exited with status 1
122 abort: pretag.forbid hook exited with status 1
123 [255]
123 [255]
124
124
125 pretxncommit hook can see changeset, can roll back txn, changeset no
125 pretxncommit hook can see changeset, can roll back txn, changeset no
126 more there after
126 more there after
127
127
128 $ echo 'pretxncommit.forbid0 = hg tip -q' >> .hg/hgrc
128 $ echo 'pretxncommit.forbid0 = hg tip -q' >> .hg/hgrc
129 $ echo 'pretxncommit.forbid1 = python "$TESTDIR"/printenv.py pretxncommit.forbid 1' >> .hg/hgrc
129 $ echo 'pretxncommit.forbid1 = python "$TESTDIR"/printenv.py pretxncommit.forbid 1' >> .hg/hgrc
130 $ echo z > z
130 $ echo z > z
131 $ hg add z
131 $ hg add z
132 $ hg -q tip
132 $ hg -q tip
133 4:539e4b31b6dc
133 4:539e4b31b6dc
134 $ hg commit -m 'fail' -d '4 0'
134 $ hg commit -m 'fail' -d '4 0'
135 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
135 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
136 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
136 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
137 5:6f611f8018c1
137 5:6f611f8018c1
138 5:6f611f8018c1
138 5:6f611f8018c1
139 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
139 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
140 transaction abort!
140 transaction abort!
141 rollback completed
141 rollback completed
142 abort: pretxncommit.forbid1 hook exited with status 1
142 abort: pretxncommit.forbid1 hook exited with status 1
143 [255]
143 [255]
144 $ hg -q tip
144 $ hg -q tip
145 4:539e4b31b6dc
145 4:539e4b31b6dc
146
146
147 precommit hook can prevent commit
147 precommit hook can prevent commit
148
148
149 $ echo 'precommit.forbid = python "$TESTDIR"/printenv.py precommit.forbid 1' >> .hg/hgrc
149 $ echo 'precommit.forbid = python "$TESTDIR"/printenv.py precommit.forbid 1' >> .hg/hgrc
150 $ hg commit -m 'fail' -d '4 0'
150 $ hg commit -m 'fail' -d '4 0'
151 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
151 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
152 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
152 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
153 abort: precommit.forbid hook exited with status 1
153 abort: precommit.forbid hook exited with status 1
154 [255]
154 [255]
155 $ hg -q tip
155 $ hg -q tip
156 4:539e4b31b6dc
156 4:539e4b31b6dc
157
157
158 preupdate hook can prevent update
158 preupdate hook can prevent update
159
159
160 $ echo 'preupdate = python "$TESTDIR"/printenv.py preupdate' >> .hg/hgrc
160 $ echo 'preupdate = python "$TESTDIR"/printenv.py preupdate' >> .hg/hgrc
161 $ hg update 1
161 $ hg update 1
162 preupdate hook: HG_PARENT1=ab228980c14d
162 preupdate hook: HG_PARENT1=ab228980c14d
163 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
163 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
164
164
165 update hook
165 update hook
166
166
167 $ echo 'update = python "$TESTDIR"/printenv.py update' >> .hg/hgrc
167 $ echo 'update = python "$TESTDIR"/printenv.py update' >> .hg/hgrc
168 $ hg update
168 $ hg update
169 preupdate hook: HG_PARENT1=539e4b31b6dc
169 preupdate hook: HG_PARENT1=539e4b31b6dc
170 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
170 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
171 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
171 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
172
172
173 pushkey hook
173 pushkey hook
174
174
175 $ echo 'pushkey = python "$TESTDIR"/printenv.py pushkey' >> .hg/hgrc
175 $ echo 'pushkey = python "$TESTDIR"/printenv.py pushkey' >> .hg/hgrc
176 $ cd ../b
176 $ cd ../b
177 $ hg bookmark -r null foo
177 $ hg bookmark -r null foo
178 $ hg push -B foo ../a
178 $ hg push -B foo ../a
179 pushing to ../a
179 pushing to ../a
180 searching for changes
180 searching for changes
181 no changes found
181 no changes found
182 exporting bookmark foo
182 exporting bookmark foo
183 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
183 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
184 [1]
184 [1]
185 $ cd ../a
185 $ cd ../a
186
186
187 listkeys hook
187 listkeys hook
188
188
189 $ echo 'listkeys = python "$TESTDIR"/printenv.py listkeys' >> .hg/hgrc
189 $ echo 'listkeys = python "$TESTDIR"/printenv.py listkeys' >> .hg/hgrc
190 $ hg bookmark -r null bar
190 $ hg bookmark -r null bar
191 $ cd ../b
191 $ cd ../b
192 $ hg pull -B bar ../a
192 $ hg pull -B bar ../a
193 pulling from ../a
193 pulling from ../a
194 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
194 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
195 no changes found
195 no changes found
196 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
196 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
197 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
197 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
198 importing bookmark bar
198 importing bookmark bar
199 $ cd ../a
199 $ cd ../a
200
200
201 test that prepushkey can prevent incoming keys
201 test that prepushkey can prevent incoming keys
202
202
203 $ echo 'prepushkey = python "$TESTDIR"/printenv.py prepushkey.forbid 1' >> .hg/hgrc
203 $ echo 'prepushkey = python "$TESTDIR"/printenv.py prepushkey.forbid 1' >> .hg/hgrc
204 $ cd ../b
204 $ cd ../b
205 $ hg bookmark -r null baz
205 $ hg bookmark -r null baz
206 $ hg push -B baz ../a
206 $ hg push -B baz ../a
207 pushing to ../a
207 pushing to ../a
208 searching for changes
208 searching for changes
209 no changes found
209 no changes found
210 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
210 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
211 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
211 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
212 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
212 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
213 exporting bookmark baz
213 exporting bookmark baz
214 prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
214 prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
215 abort: prepushkey hook exited with status 1
215 abort: prepushkey hook exited with status 1
216 [255]
216 [255]
217 $ cd ../a
217 $ cd ../a
218
218
219 test that prelistkeys can prevent listing keys
219 test that prelistkeys can prevent listing keys
220
220
221 $ echo 'prelistkeys = python "$TESTDIR"/printenv.py prelistkeys.forbid 1' >> .hg/hgrc
221 $ echo 'prelistkeys = python "$TESTDIR"/printenv.py prelistkeys.forbid 1' >> .hg/hgrc
222 $ hg bookmark -r null quux
222 $ hg bookmark -r null quux
223 $ cd ../b
223 $ cd ../b
224 $ hg pull -B quux ../a
224 $ hg pull -B quux ../a
225 pulling from ../a
225 pulling from ../a
226 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
226 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
227 abort: prelistkeys hook exited with status 1
227 abort: prelistkeys hook exited with status 1
228 [255]
228 [255]
229 $ cd ../a
229 $ cd ../a
230
230
231 prechangegroup hook can prevent incoming changes
231 prechangegroup hook can prevent incoming changes
232
232
233 $ cd ../b
233 $ cd ../b
234 $ hg -q tip
234 $ hg -q tip
235 3:07f3376c1e65
235 3:07f3376c1e65
236 $ echo '[hooks]' > .hg/hgrc
236 $ echo '[hooks]' > .hg/hgrc
237 $ echo 'prechangegroup.forbid = python "$TESTDIR"/printenv.py prechangegroup.forbid 1' >> .hg/hgrc
237 $ echo 'prechangegroup.forbid = python "$TESTDIR"/printenv.py prechangegroup.forbid 1' >> .hg/hgrc
238 $ hg pull ../a
238 $ hg pull ../a
239 pulling from ../a
239 pulling from ../a
240 searching for changes
240 searching for changes
241 prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
241 prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
242 abort: prechangegroup.forbid hook exited with status 1
242 abort: prechangegroup.forbid hook exited with status 1
243 [255]
243 [255]
244
244
245 pretxnchangegroup hook can see incoming changes, can roll back txn,
245 pretxnchangegroup hook can see incoming changes, can roll back txn,
246 incoming changes no longer there after
246 incoming changes no longer there after
247
247
248 $ echo '[hooks]' > .hg/hgrc
248 $ echo '[hooks]' > .hg/hgrc
249 $ echo 'pretxnchangegroup.forbid0 = hg tip -q' >> .hg/hgrc
249 $ echo 'pretxnchangegroup.forbid0 = hg tip -q' >> .hg/hgrc
250 $ echo 'pretxnchangegroup.forbid1 = python "$TESTDIR"/printenv.py pretxnchangegroup.forbid 1' >> .hg/hgrc
250 $ echo 'pretxnchangegroup.forbid1 = python "$TESTDIR"/printenv.py pretxnchangegroup.forbid 1' >> .hg/hgrc
251 $ hg pull ../a
251 $ hg pull ../a
252 pulling from ../a
252 pulling from ../a
253 searching for changes
253 searching for changes
254 adding changesets
254 adding changesets
255 adding manifests
255 adding manifests
256 adding file changes
256 adding file changes
257 added 1 changesets with 1 changes to 1 files
257 added 1 changesets with 1 changes to 1 files
258 4:539e4b31b6dc
258 4:539e4b31b6dc
259 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
259 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
260 transaction abort!
260 transaction abort!
261 rollback completed
261 rollback completed
262 abort: pretxnchangegroup.forbid1 hook exited with status 1
262 abort: pretxnchangegroup.forbid1 hook exited with status 1
263 [255]
263 [255]
264 $ hg -q tip
264 $ hg -q tip
265 3:07f3376c1e65
265 3:07f3376c1e65
266
266
267 outgoing hooks can see env vars
267 outgoing hooks can see env vars
268
268
269 $ rm .hg/hgrc
269 $ rm .hg/hgrc
270 $ echo '[hooks]' > ../a/.hg/hgrc
270 $ echo '[hooks]' > ../a/.hg/hgrc
271 $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> ../a/.hg/hgrc
271 $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> ../a/.hg/hgrc
272 $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> ../a/.hg/hgrc
272 $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> ../a/.hg/hgrc
273 $ hg pull ../a
273 $ hg pull ../a
274 pulling from ../a
274 pulling from ../a
275 searching for changes
275 searching for changes
276 preoutgoing hook: HG_SOURCE=pull
276 preoutgoing hook: HG_SOURCE=pull
277 adding changesets
277 adding changesets
278 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
278 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
279 adding manifests
279 adding manifests
280 adding file changes
280 adding file changes
281 added 1 changesets with 1 changes to 1 files
281 added 1 changesets with 1 changes to 1 files
282 (run 'hg update' to get a working copy)
282 (run 'hg update' to get a working copy)
283 $ hg rollback
283 $ hg rollback
284 repository tip rolled back to revision 3 (undo pull)
284 repository tip rolled back to revision 3 (undo pull)
285
285
286 preoutgoing hook can prevent outgoing changes
286 preoutgoing hook can prevent outgoing changes
287
287
288 $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> ../a/.hg/hgrc
288 $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> ../a/.hg/hgrc
289 $ hg pull ../a
289 $ hg pull ../a
290 pulling from ../a
290 pulling from ../a
291 searching for changes
291 searching for changes
292 preoutgoing hook: HG_SOURCE=pull
292 preoutgoing hook: HG_SOURCE=pull
293 preoutgoing.forbid hook: HG_SOURCE=pull
293 preoutgoing.forbid hook: HG_SOURCE=pull
294 abort: preoutgoing.forbid hook exited with status 1
294 abort: preoutgoing.forbid hook exited with status 1
295 [255]
295 [255]
296
296
297 outgoing hooks work for local clones
297 outgoing hooks work for local clones
298
298
299 $ cd ..
299 $ cd ..
300 $ echo '[hooks]' > a/.hg/hgrc
300 $ echo '[hooks]' > a/.hg/hgrc
301 $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> a/.hg/hgrc
301 $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> a/.hg/hgrc
302 $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> a/.hg/hgrc
302 $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> a/.hg/hgrc
303 $ hg clone a c
303 $ hg clone a c
304 preoutgoing hook: HG_SOURCE=clone
304 preoutgoing hook: HG_SOURCE=clone
305 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
305 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
306 updating to branch default
306 updating to branch default
307 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
307 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
308 $ rm -rf c
308 $ rm -rf c
309
309
310 preoutgoing hook can prevent outgoing changes for local clones
310 preoutgoing hook can prevent outgoing changes for local clones
311
311
312 $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> a/.hg/hgrc
312 $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> a/.hg/hgrc
313 $ hg clone a zzz
313 $ hg clone a zzz
314 preoutgoing hook: HG_SOURCE=clone
314 preoutgoing hook: HG_SOURCE=clone
315 preoutgoing.forbid hook: HG_SOURCE=clone
315 preoutgoing.forbid hook: HG_SOURCE=clone
316 abort: preoutgoing.forbid hook exited with status 1
316 abort: preoutgoing.forbid hook exited with status 1
317 [255]
317 [255]
318 $ cd b
318 $ cd b
319
319
320 $ cat > hooktests.py <<EOF
320 $ cat > hooktests.py <<EOF
321 > from mercurial import util
321 > from mercurial import util
322 >
322 >
323 > uncallable = 0
323 > uncallable = 0
324 >
324 >
325 > def printargs(args):
325 > def printargs(args):
326 > args.pop('ui', None)
326 > args.pop('ui', None)
327 > args.pop('repo', None)
327 > args.pop('repo', None)
328 > a = list(args.items())
328 > a = list(args.items())
329 > a.sort()
329 > a.sort()
330 > print 'hook args:'
330 > print 'hook args:'
331 > for k, v in a:
331 > for k, v in a:
332 > print ' ', k, v
332 > print ' ', k, v
333 >
333 >
334 > def passhook(**args):
334 > def passhook(**args):
335 > printargs(args)
335 > printargs(args)
336 >
336 >
337 > def failhook(**args):
337 > def failhook(**args):
338 > printargs(args)
338 > printargs(args)
339 > return True
339 > return True
340 >
340 >
341 > class LocalException(Exception):
341 > class LocalException(Exception):
342 > pass
342 > pass
343 >
343 >
344 > def raisehook(**args):
344 > def raisehook(**args):
345 > raise LocalException('exception from hook')
345 > raise LocalException('exception from hook')
346 >
346 >
347 > def aborthook(**args):
347 > def aborthook(**args):
348 > raise util.Abort('raise abort from hook')
348 > raise util.Abort('raise abort from hook')
349 >
349 >
350 > def brokenhook(**args):
350 > def brokenhook(**args):
351 > return 1 + {}
351 > return 1 + {}
352 >
352 >
353 > def verbosehook(ui, **args):
353 > def verbosehook(ui, **args):
354 > ui.note('verbose output from hook\n')
354 > ui.note('verbose output from hook\n')
355 >
355 >
356 > def printtags(ui, repo, **args):
356 > def printtags(ui, repo, **args):
357 > print repo.tags().keys()
357 > print repo.tags().keys()
358 >
358 >
359 > class container:
359 > class container:
360 > unreachable = 1
360 > unreachable = 1
361 > EOF
361 > EOF
362
362
363 test python hooks
363 test python hooks
364
364
365 $ PYTHONPATH="`pwd`:$PYTHONPATH"
365 $ PYTHONPATH="`pwd`:$PYTHONPATH"
366 $ export PYTHONPATH
366 $ export PYTHONPATH
367
367
368 $ echo '[hooks]' > ../a/.hg/hgrc
368 $ echo '[hooks]' > ../a/.hg/hgrc
369 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
369 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
370 $ hg pull ../a 2>&1 | grep 'raised an exception'
370 $ hg pull ../a 2>&1 | grep 'raised an exception'
371 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
371 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
372
372
373 $ echo '[hooks]' > ../a/.hg/hgrc
373 $ echo '[hooks]' > ../a/.hg/hgrc
374 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
374 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
375 $ hg pull ../a 2>&1 | grep 'raised an exception'
375 $ hg pull ../a 2>&1 | grep 'raised an exception'
376 error: preoutgoing.raise hook raised an exception: exception from hook
376 error: preoutgoing.raise hook raised an exception: exception from hook
377
377
378 $ echo '[hooks]' > ../a/.hg/hgrc
378 $ echo '[hooks]' > ../a/.hg/hgrc
379 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
379 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
380 $ hg pull ../a
380 $ hg pull ../a
381 pulling from ../a
381 pulling from ../a
382 searching for changes
382 searching for changes
383 error: preoutgoing.abort hook failed: raise abort from hook
383 error: preoutgoing.abort hook failed: raise abort from hook
384 abort: raise abort from hook
384 abort: raise abort from hook
385 [255]
385 [255]
386
386
387 $ echo '[hooks]' > ../a/.hg/hgrc
387 $ echo '[hooks]' > ../a/.hg/hgrc
388 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
388 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
389 $ hg pull ../a
389 $ hg pull ../a
390 pulling from ../a
390 pulling from ../a
391 searching for changes
391 searching for changes
392 hook args:
392 hook args:
393 hooktype preoutgoing
393 hooktype preoutgoing
394 source pull
394 source pull
395 abort: preoutgoing.fail hook failed
395 abort: preoutgoing.fail hook failed
396 [255]
396 [255]
397
397
398 $ echo '[hooks]' > ../a/.hg/hgrc
398 $ echo '[hooks]' > ../a/.hg/hgrc
399 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
399 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
400 $ hg pull ../a
400 $ hg pull ../a
401 pulling from ../a
401 pulling from ../a
402 searching for changes
402 searching for changes
403 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
403 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
404 [255]
404 [255]
405
405
406 $ echo '[hooks]' > ../a/.hg/hgrc
406 $ echo '[hooks]' > ../a/.hg/hgrc
407 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
407 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
408 $ hg pull ../a
408 $ hg pull ../a
409 pulling from ../a
409 pulling from ../a
410 searching for changes
410 searching for changes
411 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
411 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
412 [255]
412 [255]
413
413
414 $ echo '[hooks]' > ../a/.hg/hgrc
414 $ echo '[hooks]' > ../a/.hg/hgrc
415 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
415 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
416 $ hg pull ../a
416 $ hg pull ../a
417 pulling from ../a
417 pulling from ../a
418 searching for changes
418 searching for changes
419 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
419 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
420 [255]
420 [255]
421
421
422 $ echo '[hooks]' > ../a/.hg/hgrc
422 $ echo '[hooks]' > ../a/.hg/hgrc
423 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
423 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
424 $ hg pull ../a
424 $ hg pull ../a
425 pulling from ../a
425 pulling from ../a
426 searching for changes
426 searching for changes
427 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
427 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
428 [255]
428 [255]
429
429
430 $ echo '[hooks]' > ../a/.hg/hgrc
430 $ echo '[hooks]' > ../a/.hg/hgrc
431 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
431 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
432 $ hg pull ../a
432 $ hg pull ../a
433 pulling from ../a
433 pulling from ../a
434 searching for changes
434 searching for changes
435 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
435 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
436 [255]
436 [255]
437
437
438 $ echo '[hooks]' > ../a/.hg/hgrc
438 $ echo '[hooks]' > ../a/.hg/hgrc
439 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
439 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
440 $ hg pull ../a
440 $ hg pull ../a
441 pulling from ../a
441 pulling from ../a
442 searching for changes
442 searching for changes
443 hook args:
443 hook args:
444 hooktype preoutgoing
444 hooktype preoutgoing
445 source pull
445 source pull
446 adding changesets
446 adding changesets
447 adding manifests
447 adding manifests
448 adding file changes
448 adding file changes
449 added 1 changesets with 1 changes to 1 files
449 added 1 changesets with 1 changes to 1 files
450 (run 'hg update' to get a working copy)
450 (run 'hg update' to get a working copy)
451
451
452 make sure --traceback works
452 make sure --traceback works
453
453
454 $ echo '[hooks]' > .hg/hgrc
454 $ echo '[hooks]' > .hg/hgrc
455 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
455 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
456
456
457 $ echo aa > a
457 $ echo aa > a
458 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
458 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
459 Traceback (most recent call last):
459 Traceback (most recent call last):
460
460
461 $ cd ..
461 $ cd ..
462 $ hg init c
462 $ hg init c
463 $ cd c
463 $ cd c
464
464
465 $ cat > hookext.py <<EOF
465 $ cat > hookext.py <<EOF
466 > def autohook(**args):
466 > def autohook(**args):
467 > print "Automatically installed hook"
467 > print "Automatically installed hook"
468 >
468 >
469 > def reposetup(ui, repo):
469 > def reposetup(ui, repo):
470 > repo.ui.setconfig("hooks", "commit.auto", autohook)
470 > repo.ui.setconfig("hooks", "commit.auto", autohook)
471 > EOF
471 > EOF
472 $ echo '[extensions]' >> .hg/hgrc
472 $ echo '[extensions]' >> .hg/hgrc
473 $ echo 'hookext = hookext.py' >> .hg/hgrc
473 $ echo 'hookext = hookext.py' >> .hg/hgrc
474
474
475 $ touch foo
475 $ touch foo
476 $ hg add foo
476 $ hg add foo
477 $ hg ci -d '0 0' -m 'add foo'
477 $ hg ci -d '0 0' -m 'add foo'
478 Automatically installed hook
478 Automatically installed hook
479 $ echo >> foo
479 $ echo >> foo
480 $ hg ci --debug -d '0 0' -m 'change foo'
480 $ hg ci --debug -d '0 0' -m 'change foo'
481 foo
481 foo
482 calling hook commit.auto: <function autohook at *> (glob)
482 calling hook commit.auto: <function autohook at *> (glob)
483 Automatically installed hook
483 Automatically installed hook
484 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
484 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
485
485
486 $ hg showconfig hooks
486 $ hg showconfig hooks
487 hooks.commit.auto=<function autohook at *> (glob)
487 hooks.commit.auto=<function autohook at *> (glob)
488
488
489 test python hook configured with python:[file]:[hook] syntax
489 test python hook configured with python:[file]:[hook] syntax
490
490
491 $ cd ..
491 $ cd ..
492 $ mkdir d
492 $ mkdir d
493 $ cd d
493 $ cd d
494 $ hg init repo
494 $ hg init repo
495 $ mkdir hooks
495 $ mkdir hooks
496
496
497 $ cd hooks
497 $ cd hooks
498 $ cat > testhooks.py <<EOF
498 $ cat > testhooks.py <<EOF
499 > def testhook(**args):
499 > def testhook(**args):
500 > print 'hook works'
500 > print 'hook works'
501 > EOF
501 > EOF
502 $ echo '[hooks]' > ../repo/.hg/hgrc
502 $ echo '[hooks]' > ../repo/.hg/hgrc
503 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
503 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
504
504
505 $ cd ../repo
505 $ cd ../repo
506 $ hg commit -d '0 0'
506 $ hg commit -d '0 0'
507 hook works
507 hook works
508 nothing changed
508 nothing changed
509 [1]
509 [1]
510
510
511 $ cd ../../b
511 $ cd ../../b
512
512
513 make sure --traceback works on hook import failure
513 make sure --traceback works on hook import failure
514
514
515 $ cat > importfail.py <<EOF
515 $ cat > importfail.py <<EOF
516 > import somebogusmodule
516 > import somebogusmodule
517 > # dereference something in the module to force demandimport to load it
517 > # dereference something in the module to force demandimport to load it
518 > somebogusmodule.whatever
518 > somebogusmodule.whatever
519 > EOF
519 > EOF
520
520
521 $ echo '[hooks]' > .hg/hgrc
521 $ echo '[hooks]' > .hg/hgrc
522 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
522 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
523
523
524 $ echo a >> a
524 $ echo a >> a
525 $ hg --traceback commit -ma 2>&1 | egrep '^(exception|Traceback|ImportError)'
525 $ hg --traceback commit -ma 2>&1 | egrep '^(exception|Traceback|ImportError)'
526 exception from first failed import attempt:
526 exception from first failed import attempt:
527 Traceback (most recent call last):
527 Traceback (most recent call last):
528 ImportError: No module named somebogusmodule
528 ImportError: No module named somebogusmodule
529 exception from second failed import attempt:
529 exception from second failed import attempt:
530 Traceback (most recent call last):
530 Traceback (most recent call last):
531 ImportError: No module named hgext_importfail
531 ImportError: No module named hgext_importfail
532 Traceback (most recent call last):
532 Traceback (most recent call last):
533
533
534 Issue1827: Hooks Update & Commit not completely post operation
534 Issue1827: Hooks Update & Commit not completely post operation
535
535
536 commit and update hooks should run after command completion
536 commit and update hooks should run after command completion
537
537
538 $ echo '[hooks]' > .hg/hgrc
538 $ echo '[hooks]' > .hg/hgrc
539 $ echo 'commit = hg id' >> .hg/hgrc
539 $ echo 'commit = hg id' >> .hg/hgrc
540 $ echo 'update = hg id' >> .hg/hgrc
540 $ echo 'update = hg id' >> .hg/hgrc
541 $ echo bb > a
541 $ echo bb > a
542 $ hg ci -ma
542 $ hg ci -ma
543 223eafe2750c tip
543 223eafe2750c tip
544 $ hg up 0
544 $ hg up 0
545 cb9a9f314b8b
545 cb9a9f314b8b
546 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
546 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
547
547
548 make sure --verbose (and --quiet/--debug etc.) are propogated to the local ui
548 make sure --verbose (and --quiet/--debug etc.) are propogated to the local ui
549 that is passed to pre/post hooks
549 that is passed to pre/post hooks
550
550
551 $ echo '[hooks]' > .hg/hgrc
551 $ echo '[hooks]' > .hg/hgrc
552 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
552 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
553 $ hg id
553 $ hg id
554 cb9a9f314b8b
554 cb9a9f314b8b
555 $ hg id --verbose
555 $ hg id --verbose
556 calling hook pre-identify: hooktests.verbosehook
556 calling hook pre-identify: hooktests.verbosehook
557 verbose output from hook
557 verbose output from hook
558 cb9a9f314b8b
558 cb9a9f314b8b
559
559
560 Ensure hooks can be prioritized
560 Ensure hooks can be prioritized
561
561
562 $ echo '[hooks]' > .hg/hgrc
562 $ echo '[hooks]' > .hg/hgrc
563 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
563 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
564 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
564 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
565 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
565 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
566 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
566 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
567 $ hg id --verbose
567 $ hg id --verbose
568 calling hook pre-identify.b: hooktests.verbosehook
568 calling hook pre-identify.b: hooktests.verbosehook
569 verbose output from hook
569 verbose output from hook
570 calling hook pre-identify.a: hooktests.verbosehook
570 calling hook pre-identify.a: hooktests.verbosehook
571 verbose output from hook
571 verbose output from hook
572 calling hook pre-identify.c: hooktests.verbosehook
572 calling hook pre-identify.c: hooktests.verbosehook
573 verbose output from hook
573 verbose output from hook
574 cb9a9f314b8b
574 cb9a9f314b8b
575
575
576 new tags must be visible in pretxncommit (issue3210)
576 new tags must be visible in pretxncommit (issue3210)
577
577
578 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
578 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
579 $ hg tag -f foo
579 $ hg tag -f foo
580 ['a', 'foo', 'tip']
580 ['a', 'foo', 'tip']
581
581
582 new commits must be visible in pretxnchangegroup (issue3428)
582 new commits must be visible in pretxnchangegroup (issue3428)
583
583
584 $ cd ..
584 $ cd ..
585 $ hg init to
585 $ hg init to
586 $ echo '[hooks]' >> to/.hg/hgrc
586 $ echo '[hooks]' >> to/.hg/hgrc
587 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
587 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
588 $ echo a >> to/a
588 $ echo a >> to/a
589 $ hg --cwd to ci -Ama
589 $ hg --cwd to ci -Ama
590 adding a
590 adding a
591 $ hg clone to from
591 $ hg clone to from
592 updating to branch default
592 updating to branch default
593 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
593 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
594 $ echo aa >> from/a
594 $ echo aa >> from/a
595 $ hg --cwd from ci -mb
595 $ hg --cwd from ci -mb
596 $ hg --cwd from push
596 $ hg --cwd from push
597 pushing to $TESTTMP/to
597 pushing to $TESTTMP/to
598 searching for changes
598 searching for changes
599 adding changesets
599 adding changesets
600 adding manifests
600 adding manifests
601 adding file changes
601 adding file changes
602 added 1 changesets with 1 changes to 1 files
602 added 1 changesets with 1 changes to 1 files
603 changeset: 1:9836a07b9b9d
603 changeset: 1:9836a07b9b9d
604 tag: tip
604 tag: tip
605 user: test
605 user: test
606 date: Thu Jan 01 00:00:00 1970 +0000
606 date: Thu Jan 01 00:00:00 1970 +0000
607 summary: b
607 summary: b
608
608
@@ -1,302 +1,315
1 $ "$TESTDIR/hghave" system-sh || exit 80
1 $ "$TESTDIR/hghave" system-sh || exit 80
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5
5
6 $ echo a > a
6 $ echo a > a
7 $ hg add a
7 $ hg add a
8 $ hg commit -m "test"
8 $ hg commit -m "test"
9 $ hg history
9 $ hg history
10 changeset: 0:acb14030fe0a
10 changeset: 0:acb14030fe0a
11 tag: tip
11 tag: tip
12 user: test
12 user: test
13 date: Thu Jan 01 00:00:00 1970 +0000
13 date: Thu Jan 01 00:00:00 1970 +0000
14 summary: test
14 summary: test
15
15
16
16
17 $ hg tag ' '
17 $ hg tag ' '
18 abort: tag names cannot consist entirely of whitespace
18 abort: tag names cannot consist entirely of whitespace
19 [255]
19 [255]
20
20
21 $ hg tag "bleah"
21 $ hg tag "bleah"
22 $ hg history
22 $ hg history
23 changeset: 1:d4f0d2909abc
23 changeset: 1:d4f0d2909abc
24 tag: tip
24 tag: tip
25 user: test
25 user: test
26 date: Thu Jan 01 00:00:00 1970 +0000
26 date: Thu Jan 01 00:00:00 1970 +0000
27 summary: Added tag bleah for changeset acb14030fe0a
27 summary: Added tag bleah for changeset acb14030fe0a
28
28
29 changeset: 0:acb14030fe0a
29 changeset: 0:acb14030fe0a
30 tag: bleah
30 tag: bleah
31 user: test
31 user: test
32 date: Thu Jan 01 00:00:00 1970 +0000
32 date: Thu Jan 01 00:00:00 1970 +0000
33 summary: test
33 summary: test
34
34
35
35
36 $ echo foo >> .hgtags
36 $ echo foo >> .hgtags
37 $ hg tag "bleah2"
37 $ hg tag "bleah2"
38 abort: working copy of .hgtags is changed (please commit .hgtags manually)
38 abort: working copy of .hgtags is changed (please commit .hgtags manually)
39 [255]
39 [255]
40
40
41 $ hg revert .hgtags
41 $ hg revert .hgtags
42 $ hg tag -r 0 x y z y y z
42 $ hg tag -r 0 x y z y y z
43 abort: tag names must be unique
43 abort: tag names must be unique
44 [255]
44 [255]
45 $ hg tag tap nada dot tip null .
45 $ hg tag tap nada dot tip null .
46 abort: the name 'tip' is reserved
46 abort: the name 'tip' is reserved
47 [255]
47 [255]
48 $ hg tag "bleah"
48 $ hg tag "bleah"
49 abort: tag 'bleah' already exists (use -f to force)
49 abort: tag 'bleah' already exists (use -f to force)
50 [255]
50 [255]
51 $ hg tag "blecch" "bleah"
51 $ hg tag "blecch" "bleah"
52 abort: tag 'bleah' already exists (use -f to force)
52 abort: tag 'bleah' already exists (use -f to force)
53 [255]
53 [255]
54
54
55 $ hg tag --remove "blecch"
55 $ hg tag --remove "blecch"
56 abort: tag 'blecch' does not exist
56 abort: tag 'blecch' does not exist
57 [255]
57 [255]
58 $ hg tag --remove "bleah" "blecch" "blough"
58 $ hg tag --remove "bleah" "blecch" "blough"
59 abort: tag 'blecch' does not exist
59 abort: tag 'blecch' does not exist
60 [255]
60 [255]
61
61
62 $ hg tag -r 0 "bleah0"
62 $ hg tag -r 0 "bleah0"
63 $ hg tag -l -r 1 "bleah1"
63 $ hg tag -l -r 1 "bleah1"
64 $ hg tag gack gawk gorp
64 $ hg tag gack gawk gorp
65 $ hg tag -f gack
65 $ hg tag -f gack
66 $ hg tag --remove gack gorp
66 $ hg tag --remove gack gorp
67
67
68 $ hg tag "bleah "
68 $ hg tag "bleah "
69 abort: tag 'bleah' already exists (use -f to force)
69 abort: tag 'bleah' already exists (use -f to force)
70 [255]
70 [255]
71 $ hg tag " bleah"
71 $ hg tag " bleah"
72 abort: tag 'bleah' already exists (use -f to force)
72 abort: tag 'bleah' already exists (use -f to force)
73 [255]
73 [255]
74 $ hg tag " bleah"
74 $ hg tag " bleah"
75 abort: tag 'bleah' already exists (use -f to force)
75 abort: tag 'bleah' already exists (use -f to force)
76 [255]
76 [255]
77 $ hg tag -r 0 " bleahbleah "
77 $ hg tag -r 0 " bleahbleah "
78 $ hg tag -r 0 " bleah bleah "
78 $ hg tag -r 0 " bleah bleah "
79
79
80 $ cat .hgtags
80 $ cat .hgtags
81 acb14030fe0a21b60322c440ad2d20cf7685a376 bleah
81 acb14030fe0a21b60322c440ad2d20cf7685a376 bleah
82 acb14030fe0a21b60322c440ad2d20cf7685a376 bleah0
82 acb14030fe0a21b60322c440ad2d20cf7685a376 bleah0
83 336fccc858a4eb69609a291105009e484a6b6b8d gack
83 336fccc858a4eb69609a291105009e484a6b6b8d gack
84 336fccc858a4eb69609a291105009e484a6b6b8d gawk
84 336fccc858a4eb69609a291105009e484a6b6b8d gawk
85 336fccc858a4eb69609a291105009e484a6b6b8d gorp
85 336fccc858a4eb69609a291105009e484a6b6b8d gorp
86 336fccc858a4eb69609a291105009e484a6b6b8d gack
86 336fccc858a4eb69609a291105009e484a6b6b8d gack
87 799667b6f2d9b957f73fa644a918c2df22bab58f gack
87 799667b6f2d9b957f73fa644a918c2df22bab58f gack
88 799667b6f2d9b957f73fa644a918c2df22bab58f gack
88 799667b6f2d9b957f73fa644a918c2df22bab58f gack
89 0000000000000000000000000000000000000000 gack
89 0000000000000000000000000000000000000000 gack
90 336fccc858a4eb69609a291105009e484a6b6b8d gorp
90 336fccc858a4eb69609a291105009e484a6b6b8d gorp
91 0000000000000000000000000000000000000000 gorp
91 0000000000000000000000000000000000000000 gorp
92 acb14030fe0a21b60322c440ad2d20cf7685a376 bleahbleah
92 acb14030fe0a21b60322c440ad2d20cf7685a376 bleahbleah
93 acb14030fe0a21b60322c440ad2d20cf7685a376 bleah bleah
93 acb14030fe0a21b60322c440ad2d20cf7685a376 bleah bleah
94
94
95 $ cat .hg/localtags
95 $ cat .hg/localtags
96 d4f0d2909abc9290e2773c08837d70c1794e3f5a bleah1
96 d4f0d2909abc9290e2773c08837d70c1794e3f5a bleah1
97
97
98 tagging on a non-head revision
98 tagging on a non-head revision
99
99
100 $ hg update 0
100 $ hg update 0
101 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
101 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
102 $ hg tag -l localblah
102 $ hg tag -l localblah
103 $ hg tag "foobar"
103 $ hg tag "foobar"
104 abort: not at a branch head (use -f to force)
104 abort: not at a branch head (use -f to force)
105 [255]
105 [255]
106 $ hg tag -f "foobar"
106 $ hg tag -f "foobar"
107 $ cat .hgtags
107 $ cat .hgtags
108 acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
108 acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
109 $ cat .hg/localtags
109 $ cat .hg/localtags
110 d4f0d2909abc9290e2773c08837d70c1794e3f5a bleah1
110 d4f0d2909abc9290e2773c08837d70c1794e3f5a bleah1
111 acb14030fe0a21b60322c440ad2d20cf7685a376 localblah
111 acb14030fe0a21b60322c440ad2d20cf7685a376 localblah
112
112
113 $ hg tag -l 'xx
113 $ hg tag -l 'xx
114 > newline'
114 > newline'
115 abort: '\n' cannot be used in a tag name
115 abort: '\n' cannot be used in a tag name
116 [255]
116 [255]
117 $ hg tag -l 'xx:xx'
117 $ hg tag -l 'xx:xx'
118 abort: ':' cannot be used in a tag name
118 abort: ':' cannot be used in a tag name
119 [255]
119 [255]
120
120
121 cloning local tags
121 cloning local tags
122
122
123 $ cd ..
123 $ cd ..
124 $ hg -R test log -r0:5
124 $ hg -R test log -r0:5
125 changeset: 0:acb14030fe0a
125 changeset: 0:acb14030fe0a
126 tag: bleah
126 tag: bleah
127 tag: bleah bleah
127 tag: bleah bleah
128 tag: bleah0
128 tag: bleah0
129 tag: bleahbleah
129 tag: bleahbleah
130 tag: foobar
130 tag: foobar
131 tag: localblah
131 tag: localblah
132 user: test
132 user: test
133 date: Thu Jan 01 00:00:00 1970 +0000
133 date: Thu Jan 01 00:00:00 1970 +0000
134 summary: test
134 summary: test
135
135
136 changeset: 1:d4f0d2909abc
136 changeset: 1:d4f0d2909abc
137 tag: bleah1
137 tag: bleah1
138 user: test
138 user: test
139 date: Thu Jan 01 00:00:00 1970 +0000
139 date: Thu Jan 01 00:00:00 1970 +0000
140 summary: Added tag bleah for changeset acb14030fe0a
140 summary: Added tag bleah for changeset acb14030fe0a
141
141
142 changeset: 2:336fccc858a4
142 changeset: 2:336fccc858a4
143 tag: gawk
143 tag: gawk
144 user: test
144 user: test
145 date: Thu Jan 01 00:00:00 1970 +0000
145 date: Thu Jan 01 00:00:00 1970 +0000
146 summary: Added tag bleah0 for changeset acb14030fe0a
146 summary: Added tag bleah0 for changeset acb14030fe0a
147
147
148 changeset: 3:799667b6f2d9
148 changeset: 3:799667b6f2d9
149 user: test
149 user: test
150 date: Thu Jan 01 00:00:00 1970 +0000
150 date: Thu Jan 01 00:00:00 1970 +0000
151 summary: Added tag gack, gawk, gorp for changeset 336fccc858a4
151 summary: Added tag gack, gawk, gorp for changeset 336fccc858a4
152
152
153 changeset: 4:154eeb7c0138
153 changeset: 4:154eeb7c0138
154 user: test
154 user: test
155 date: Thu Jan 01 00:00:00 1970 +0000
155 date: Thu Jan 01 00:00:00 1970 +0000
156 summary: Added tag gack for changeset 799667b6f2d9
156 summary: Added tag gack for changeset 799667b6f2d9
157
157
158 changeset: 5:b4bb47aaff09
158 changeset: 5:b4bb47aaff09
159 user: test
159 user: test
160 date: Thu Jan 01 00:00:00 1970 +0000
160 date: Thu Jan 01 00:00:00 1970 +0000
161 summary: Removed tag gack, gorp
161 summary: Removed tag gack, gorp
162
162
163 $ hg clone -q -rbleah1 test test1
163 $ hg clone -q -rbleah1 test test1
164 $ hg -R test1 parents --style=compact
164 $ hg -R test1 parents --style=compact
165 1[tip] d4f0d2909abc 1970-01-01 00:00 +0000 test
165 1[tip] d4f0d2909abc 1970-01-01 00:00 +0000 test
166 Added tag bleah for changeset acb14030fe0a
166 Added tag bleah for changeset acb14030fe0a
167
167
168 $ hg clone -q -r5 test#bleah1 test2
168 $ hg clone -q -r5 test#bleah1 test2
169 $ hg -R test2 parents --style=compact
169 $ hg -R test2 parents --style=compact
170 5[tip] b4bb47aaff09 1970-01-01 00:00 +0000 test
170 5[tip] b4bb47aaff09 1970-01-01 00:00 +0000 test
171 Removed tag gack, gorp
171 Removed tag gack, gorp
172
172
173 $ hg clone -q -U test#bleah1 test3
173 $ hg clone -q -U test#bleah1 test3
174 $ hg -R test3 parents --style=compact
174 $ hg -R test3 parents --style=compact
175
175
176 $ cd test
176 $ cd test
177
177
178 Issue601: hg tag doesn't do the right thing if .hgtags or localtags
178 Issue601: hg tag doesn't do the right thing if .hgtags or localtags
179 doesn't end with EOL
179 doesn't end with EOL
180
180
181 $ python << EOF
181 $ python << EOF
182 > f = file('.hg/localtags'); last = f.readlines()[-1][:-1]; f.close()
182 > f = file('.hg/localtags'); last = f.readlines()[-1][:-1]; f.close()
183 > f = file('.hg/localtags', 'w'); f.write(last); f.close()
183 > f = file('.hg/localtags', 'w'); f.write(last); f.close()
184 > EOF
184 > EOF
185 $ cat .hg/localtags; echo
185 $ cat .hg/localtags; echo
186 acb14030fe0a21b60322c440ad2d20cf7685a376 localblah
186 acb14030fe0a21b60322c440ad2d20cf7685a376 localblah
187 $ hg tag -l localnewline
187 $ hg tag -l localnewline
188 $ cat .hg/localtags; echo
188 $ cat .hg/localtags; echo
189 acb14030fe0a21b60322c440ad2d20cf7685a376 localblah
189 acb14030fe0a21b60322c440ad2d20cf7685a376 localblah
190 c2899151f4e76890c602a2597a650a72666681bf localnewline
190 c2899151f4e76890c602a2597a650a72666681bf localnewline
191
191
192
192
193 $ python << EOF
193 $ python << EOF
194 > f = file('.hgtags'); last = f.readlines()[-1][:-1]; f.close()
194 > f = file('.hgtags'); last = f.readlines()[-1][:-1]; f.close()
195 > f = file('.hgtags', 'w'); f.write(last); f.close()
195 > f = file('.hgtags', 'w'); f.write(last); f.close()
196 > EOF
196 > EOF
197 $ hg ci -m'broken manual edit of .hgtags'
197 $ hg ci -m'broken manual edit of .hgtags'
198 $ cat .hgtags; echo
198 $ cat .hgtags; echo
199 acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
199 acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
200 $ hg tag newline
200 $ hg tag newline
201 $ cat .hgtags; echo
201 $ cat .hgtags; echo
202 acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
202 acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
203 a0eea09de1eeec777b46f2085260a373b2fbc293 newline
203 a0eea09de1eeec777b46f2085260a373b2fbc293 newline
204
204
205
205
206 tag and branch using same name
206 tag and branch using same name
207
207
208 $ hg branch tag-and-branch-same-name
208 $ hg branch tag-and-branch-same-name
209 marked working directory as branch tag-and-branch-same-name
209 marked working directory as branch tag-and-branch-same-name
210 (branches are permanent and global, did you want a bookmark?)
210 (branches are permanent and global, did you want a bookmark?)
211 $ hg ci -m"discouraged"
211 $ hg ci -m"discouraged"
212 $ hg tag tag-and-branch-same-name
212 $ hg tag tag-and-branch-same-name
213 warning: tag tag-and-branch-same-name conflicts with existing branch name
213 warning: tag tag-and-branch-same-name conflicts with existing branch name
214
214
215 test custom commit messages
215 test custom commit messages
216
216
217 $ cat > editor << '__EOF__'
217 $ cat > editor << '__EOF__'
218 > #!/bin/sh
218 > #!/bin/sh
219 > echo "custom tag message" > "$1"
219 > echo "custom tag message" > "$1"
220 > echo "second line" >> "$1"
220 > echo "second line" >> "$1"
221 > __EOF__
221 > __EOF__
222 $ chmod +x editor
222 $ chmod +x editor
223 $ HGEDITOR="'`pwd`'"/editor hg tag custom-tag -e
223 $ HGEDITOR="'`pwd`'"/editor hg tag custom-tag -e
224 $ hg log -l1 --template "{desc}\n"
224 $ hg log -l1 --template "{desc}\n"
225 custom tag message
225 custom tag message
226 second line
226 second line
227
227
228
228
229 local tag with .hgtags modified
229 local tag with .hgtags modified
230
230
231 $ hg tag hgtags-modified
231 $ hg tag hgtags-modified
232 $ hg rollback
232 $ hg rollback
233 repository tip rolled back to revision 13 (undo commit)
233 repository tip rolled back to revision 13 (undo commit)
234 working directory now based on revision 13
234 working directory now based on revision 13
235 $ hg st
235 $ hg st
236 M .hgtags
236 M .hgtags
237 ? .hgtags.orig
237 ? .hgtags.orig
238 ? editor
238 ? editor
239 $ hg tag --local baz
239 $ hg tag --local baz
240 $ hg revert --no-backup .hgtags
240 $ hg revert --no-backup .hgtags
241
241
242
242
243 tagging when at named-branch-head that's not a topo-head
243 tagging when at named-branch-head that's not a topo-head
244
244
245 $ hg up default
245 $ hg up default
246 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
246 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
247 $ hg merge -t internal:local
247 $ hg merge -t internal:local
248 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
248 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
249 (branch merge, don't forget to commit)
249 (branch merge, don't forget to commit)
250 $ hg ci -m 'merge named branch'
250 $ hg ci -m 'merge named branch'
251 $ hg up 13
251 $ hg up 13
252 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
252 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
253 $ hg tag new-topo-head
253 $ hg tag new-topo-head
254
254
255
255
256 tagging on null rev
256 tagging on null rev
257
257
258 $ hg up null
258 $ hg up null
259 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
259 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
260 $ hg tag nullrev
260 $ hg tag nullrev
261 abort: not at a branch head (use -f to force)
261 abort: not at a branch head (use -f to force)
262 [255]
262 [255]
263
263
264 $ hg init empty
264 $ hg init empty
265 $ hg tag -R empty nullrev
265 $ hg tag -R empty nullrev
266
266
267 $ cd ..
267 $ cd ..
268
268
269 tagging on an uncommitted merge (issue2542)
269 tagging on an uncommitted merge (issue2542)
270
270
271 $ hg init repo-tag-uncommitted-merge
271 $ hg init repo-tag-uncommitted-merge
272 $ cd repo-tag-uncommitted-merge
272 $ cd repo-tag-uncommitted-merge
273 $ echo c1 > f1
273 $ echo c1 > f1
274 $ hg ci -Am0
274 $ hg ci -Am0
275 adding f1
275 adding f1
276 $ echo c2 > f2
276 $ echo c2 > f2
277 $ hg ci -Am1
277 $ hg ci -Am1
278 adding f2
278 adding f2
279 $ hg co -q 0
279 $ hg co -q 0
280 $ hg branch b1
280 $ hg branch b1
281 marked working directory as branch b1
281 marked working directory as branch b1
282 (branches are permanent and global, did you want a bookmark?)
282 (branches are permanent and global, did you want a bookmark?)
283 $ hg ci -m2
283 $ hg ci -m2
284 $ hg up default
284 $ hg up default
285 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
285 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
286 $ hg merge b1
286 $ hg merge b1
287 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
287 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
288 (branch merge, don't forget to commit)
288 (branch merge, don't forget to commit)
289
289
290 $ hg tag t1
290 $ hg tag t1
291 abort: uncommitted merge
291 abort: uncommitted merge
292 [255]
292 [255]
293 $ hg status
293 $ hg status
294 $ hg tag --rev 1 t2
294 $ hg tag --rev 1 t2
295 abort: uncommitted merge
295 abort: uncommitted merge
296 [255]
296 [255]
297 $ hg tag --rev 1 --local t3
297 $ hg tag --rev 1 --local t3
298 $ hg tags -v
298 $ hg tags -v
299 tip 2:2a156e8887cc
299 tip 2:2a156e8887cc
300 t3 1:c3adabd1a5f4 local
300 t3 1:c3adabd1a5f4 local
301
301
302 $ cd ..
302 $ cd ..
303
304 commit hook on tag used to be run without write lock - issue3344
305
306 $ hg init repo-tag
307 $ hg init repo-tag-target
308 $ hg -R repo-tag --config hooks.commit="hg push \"`pwd`/repo-tag-target\"" tag tag
309 pushing to $TESTTMP/repo-tag-target
310 searching for changes
311 adding changesets
312 adding manifests
313 adding file changes
314 added 1 changesets with 1 changes to 1 files
315
General Comments 0
You need to be logged in to leave comments. Login now