push: do not try to push remote obsolete if local has none
Patrick Mezard
r17252:16fad732 stable
@@ -1,2586 +1,2586 @@
# localrepo.py - read/write repository class for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import bin, hex, nullid, nullrev, short
from i18n import _
import peer, changegroup, subrepo, discovery, pushkey, obsolete
import changelog, dirstate, filelog, manifest, context, bookmarks, phases
import lock, transaction, store, encoding, base85
import scmutil, util, extensions, hook, error, revset
import match as matchmod
import merge as mergemod
import tags as tagsmod
from lock import release
import weakref, errno, os, time, inspect
propertycache = util.propertycache
filecache = scmutil.filecache

class storecache(filecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        return obj.sjoin(fname)

MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))

class localpeer(peer.peerrepository):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=MODERNCAPS):
        peer.peerrepository.__init__(self)
        self._repo = repo
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)
        self.requirements = repo.requirements
        self.supportedformats = repo.supportedformats

    def close(self):
        self._repo.close()

    def _capabilities(self):
        return self._caps

    def local(self):
        return self._repo

    def canpush(self):
        return True

    def url(self):
        return self._repo.url()

    def lookup(self, key):
        return self._repo.lookup(key)

    def branchmap(self):
        return discovery.visiblebranchmap(self._repo)

    def heads(self):
        return discovery.visibleheads(self._repo)

    def known(self, nodes):
        return self._repo.known(nodes)

    def getbundle(self, source, heads=None, common=None):
        return self._repo.getbundle(source, heads=heads, common=common)

    # TODO We might want to move the next two calls into legacypeer and add
    # unbundle instead.

    def lock(self):
        return self._repo.lock()

    def addchangegroup(self, cg, source, url):
        return self._repo.addchangegroup(cg, source, url)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        '''used to test argument passing over the wire'''
        return "%s %s %s %s %s" % (one, two, three, four, five)

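# [Editorial note, not part of this changeset: a minimal usage sketch.
# Assuming `repo` is an open localrepository, the peer API above lets callers
# treat local and remote repositories uniformly:
#
#     other = repo.peer()
#     node = other.lookup('tip')
#     heads = other.heads()
# ]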
class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        localpeer.__init__(self, repo, caps=LEGACYCAPS)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def between(self, pairs):
        return self._repo.between(pairs)

    def changegroup(self, basenodes, source):
        return self._repo.changegroup(basenodes, source)

    def changegroupsubset(self, bases, heads, source):
        return self._repo.changegroupsubset(bases, heads, source)

class localrepository(object):

    supportedformats = set(('revlogv1', 'generaldelta'))
    supported = supportedformats | set(('store', 'fncache', 'shared',
                                        'dotencode'))
    openerreqs = set(('revlogv1', 'generaldelta'))
    requirements = ['revlogv1']

    def _baserequirements(self, create):
        return self.requirements[:]

    def __init__(self, baseui, path=None, create=False):
        self.wopener = scmutil.opener(path, expand=True)
        self.wvfs = self.wopener
        self.root = self.wvfs.base
        self.path = self.wvfs.join(".hg")
        self.origroot = path
        self.auditor = scmutil.pathauditor(self.root, self._checknested)
        self.opener = scmutil.opener(self.path)
        self.vfs = self.opener
        self.baseui = baseui
        self.ui = baseui.copy()
        # A list of callbacks to shape the phase if no data were found.
        # Callbacks are in the form: func(repo, roots) --> processed root.
        # This list is to be filled by extensions during repo setup.
        self._phasedefaults = []
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            pass

        if not self.vfs.isdir():
            if create:
                if not self.wvfs.exists():
                    self.wvfs.makedirs()
                self.vfs.makedir(notindexed=True)
                requirements = self._baserequirements(create)
                if self.ui.configbool('format', 'usestore', True):
                    self.vfs.mkdir("store")
                    requirements.append("store")
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                    # create an invalid changelog
                    self.vfs.append(
                        "00changelog.i",
                        '\0\0\0\2' # represents revlogv2
                        ' dummy changelog to prevent using the old repo layout'
                    )
                if self.ui.configbool('format', 'generaldelta', False):
                    requirements.append("generaldelta")
                requirements = set(requirements)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            try:
                requirements = scmutil.readrequires(self.vfs, self.supported)
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                requirements = set()

        self.sharedpath = self.path
        try:
            s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
            if not os.path.exists(s):
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, scmutil.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.svfs = self.sopener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()


        self._branchcache = None
        self._branchcachetip = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}

    def close(self):
        pass

    def _restrictcapabilities(self, caps):
        return caps

    def _applyrequirements(self, requirements):
        self.requirements = requirements
        self.sopener.options = dict((r, 1) for r in requirements
                                    if r in self.openerreqs)

    def _writerequirements(self):
        reqfile = self.opener("requires", "w")
        for r in self.requirements:
            reqfile.write("%s\n" % r)
        reqfile.close()

    def _checknested(self, path):
        """Determine if path is a legal nested repository."""
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    return True
                else:
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                parts.pop()
        return False

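    # [Editorial note, not part of this changeset: an illustrative sketch with
    # hypothetical paths. For a repo rooted at /repo whose working copy has a
    # subrepository 'sub':
    #
    #     repo._checknested('/repo/sub')       # True: legal nested repo
    #     repo._checknested('/elsewhere/x')    # False: outside repo.root
    # ]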
    def peer(self):
        return localpeer(self) # not cached to avoid reference cycle

    @filecache('bookmarks')
    def _bookmarks(self):
        return bookmarks.read(self)

    @filecache('bookmarks.current')
    def _bookmarkcurrent(self):
        return bookmarks.readcurrent(self)

    def _writebookmarks(self, marks):
        bookmarks.write(self)

    def bookmarkheads(self, bookmark):
        name = bookmark.split('@', 1)[0]
        heads = []
        for mark, n in self._bookmarks.iteritems():
            if mark.split('@', 1)[0] == name:
                heads.append(n)
        return heads

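    # [Editorial note, not part of this changeset: bookmark names of the form
    # 'name@suffix' denote divergent copies of 'name', so for bookmarks 'foo',
    # 'foo@default' and 'foo@remote':
    #
    #     repo.bookmarkheads('foo')   # nodes of all three divergent heads
    # ]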
    @storecache('phaseroots')
    def _phasecache(self):
        return phases.phasecache(self, self._phasedefaults)

    @storecache('obsstore')
    def obsstore(self):
        store = obsolete.obsstore(self.sopener)
        return store

    @propertycache
    def hiddenrevs(self):
        """hiddenrevs: revs that should be hidden by commands and tools

        This set is carried on the repo to ease initialisation and lazy
        loading; it'll probably move back to changelog for efficiency and
        consistency reasons.

        Note that the hiddenrevs will need invalidation when
        - a new changeset is added (possibly unstable above extinct)
        - a new obsolete marker is added (possibly new extinct changeset)
        """
        hidden = set()
        if self.obsstore:
            ### hide extinct changesets that are not accessible by any means
            hiddenquery = 'extinct() - ::(. + bookmark() + tagged())'
            hidden.update(self.revs(hiddenquery))
        return hidden

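    # [Editorial note, not part of this changeset: a sketch assuming the
    # standard propertycache behavior of caching on the instance. Code that
    # adds changesets or obsolete markers can force recomputation with:
    #
    #     if 'hiddenrevs' in vars(repo):
    #         del repo.hiddenrevs     # recomputed on next access
    # ]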
    @storecache('00changelog.i')
    def changelog(self):
        c = changelog.changelog(self.sopener)
        if 'HG_PENDING' in os.environ:
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        return c

    @storecache('00manifest.i')
    def manifest(self):
        return manifest.manifest(self.sopener)

    @filecache('dirstate')
    def dirstate(self):
        warned = [0]
        def validate(node):
            try:
                self.changelog.rev(node)
                return node
            except error.LookupError:
                if not warned[0]:
                    warned[0] = True
                    self.ui.warn(_("warning: ignoring unknown"
                                   " working parent %s!\n") % short(node))
                return nullid

        return dirstate.dirstate(self.opener, self.ui, self.root, validate)

    def __getitem__(self, changeid):
        if changeid is None:
            return context.workingctx(self)
        return context.changectx(self, changeid)

    def __contains__(self, changeid):
        try:
            return bool(self.lookup(changeid))
        except error.RepoLookupError:
            return False

    def __nonzero__(self):
        return True

    def __len__(self):
        return len(self.changelog)

    def __iter__(self):
        for i in xrange(len(self)):
            yield i

    def revs(self, expr, *args):
        '''Return a list of revisions matching the given revset'''
        expr = revset.formatspec(expr, *args)
        m = revset.match(None, expr)
        return [r for r in m(self, range(len(self)))]

    def set(self, expr, *args):
        '''
        Yield a context for each matching revision, after doing arg
        replacement via revset.formatspec
        '''
        for r in self.revs(expr, *args):
            yield self[r]

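    # [Editorial note, not part of this changeset: revs() and set() are the
    # programmatic entry points to the revset language; formatspec quotes the
    # arguments substituted for %s/%d placeholders. For example:
    #
    #     for rev in repo.revs('branch(%s) and not obsolete()', 'default'):
    #         ...
    #     for ctx in repo.set('heads(descendants(%d))', 42):
    #         ui.write('%d:%s\n' % (ctx.rev(), short(ctx.node())))
    # ]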
    def url(self):
        return 'file:' + self.root

    def hook(self, name, throw=False, **args):
        return hook.hook(self.ui, self, name, throw, **args)

    tag_disallowed = ':\r\n'

    def _tag(self, names, node, message, local, user, date, extra={}):
        if isinstance(names, str):
            allchars = names
            names = (names,)
        else:
            allchars = ''.join(names)
        for c in self.tag_disallowed:
            if c in allchars:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        branches = self.branchmap()
        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if (self._tagscache.tagtypes and
                    name in self._tagscache.tagtypes):
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        self.invalidatecaches()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode

    def tag(self, names, node, message, local, user, date):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            for x in self.status()[:5]:
                if '.hgtags' in x:
                    raise util.Abort(_('working copy of .hgtags is changed '
                                       '(please commit .hgtags manually)'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date)

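    # [Editorial note, not part of this changeset: a typical call creating a
    # global (committed) tag; with date=None the tag commit uses the current
    # date, and local=True would write .hg/localtags instead:
    #
    #     repo.tag('v1.0', repo.lookup('tip'), 'Added tag v1.0', False,
    #              'user@example.com', None)
    # ]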
    @propertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache

    def tags(self):
        '''return a mapping of tag to node'''
        t = {}
        for k, v in self._tagscache.tags.iteritems():
            try:
                # ignore tags to unknown nodes
                self.changelog.rev(v)
                t[k] = v
            except (error.LookupError, ValueError):
                pass
        return t

    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        alltags = {} # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)

    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        if not self._tagscache.tagslist:
            l = []
            for t, n in self.tags().iteritems():
                r = self.changelog.rev(n)
                l.append((r, t, n))
            self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]

        return self._tagscache.tagslist

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self._tagscache.nodetagscache:
            nodetagscache = {}
            for t, n in self._tagscache.tags.iteritems():
                nodetagscache.setdefault(n, []).append(t)
            for tags in nodetagscache.itervalues():
                tags.sort()
            self._tagscache.nodetagscache = nodetagscache
        return self._tagscache.nodetagscache.get(node, [])

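    # [Editorial note, not part of this changeset: the tag queries above are
    # all views over the same _tagscache:
    #
    #     repo.tags()            # {'tip': node, 'v1.0': node, ...}
    #     repo.tagtype('v1.0')   # 'global', 'local' or None
    #     repo.nodetags(node)    # sorted list of tag names pointing at node
    # ]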
    def nodebookmarks(self, node):
        marks = []
        for bookmark, n in self._bookmarks.iteritems():
            if n == node:
                marks.append(bookmark)
        return sorted(marks)

    def _branchtags(self, partial, lrev):
        # TODO: rename this function?
        tiprev = len(self) - 1
        if lrev != tiprev:
            ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
            self._updatebranchcache(partial, ctxgen)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial

    def updatebranchcache(self):
        tip = self.changelog.tip()
        if self._branchcache is not None and self._branchcachetip == tip:
            return

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if oldtip is None or oldtip not in self.changelog.nodemap:
            partial, last, lrev = self._readbranchcache()
        else:
            lrev = self.changelog.rev(oldtip)
            partial = self._branchcache

        self._branchtags(partial, lrev)
        # this private cache holds all heads (not just the branch tips)
        self._branchcache = partial

    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]}'''
        self.updatebranchcache()
        return self._branchcache

    def _branchtip(self, heads):
        '''return the tipmost branch head in heads'''
        tip = heads[-1]
        for h in reversed(heads):
            if not self[h].closesbranch():
                tip = h
                break
        return tip

    def branchtip(self, branch):
        '''return the tip node for a given branch'''
        if branch not in self.branchmap():
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
        return self._branchtip(self.branchmap()[branch])

    def branchtags(self):
        '''return a dict where branch names map to the tipmost head of
        the branch, open heads come before closed'''
        bt = {}
        for bn, heads in self.branchmap().iteritems():
            bt[bn] = self._branchtip(heads)
        return bt

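    # [Editorial note, not part of this changeset: branchmap() keeps every
    # head of a branch while branchtags() reduces each branch to its tipmost
    # open head:
    #
    #     repo.branchmap()    # {'default': [n1, n2], 'stable': [n3]}
    #     repo.branchtags()   # {'default': n2, 'stable': n3}
    # ]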
    def _readbranchcache(self):
        partial = {}
        try:
            f = self.opener("cache/branchheads")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            return {}, nullid, nullrev

        try:
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            for l in lines:
                if not l:
                    continue
                node, label = l.split(" ", 1)
                label = encoding.tolocal(label.strip())
                if not node in self:
                    raise ValueError('invalidating branch cache because node '
                                     '%s does not exist' % node)
                partial.setdefault(label, []).append(bin(node))
        except KeyboardInterrupt:
            raise
        except Exception, inst:
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev

    def _writebranchcache(self, branches, tip, tiprev):
        try:
            f = self.opener("cache/branchheads", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, nodes in branches.iteritems():
                for node in nodes:
                    f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
            f.close()
        except (IOError, OSError):
            pass

    def _updatebranchcache(self, partial, ctxgen):
        """Given a branchhead cache, partial, that may have extra nodes or be
        missing heads, and a generator of nodes that are at least a superset of
        heads missing, this function updates partial to be correct.
        """
        # collect new branch entries
        newbranches = {}
        for c in ctxgen:
            newbranches.setdefault(c.branch(), []).append(c.node())
        # if older branchheads are reachable from new ones, they aren't
        # really branchheads. Note checking parents is insufficient:
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newnodes in newbranches.iteritems():
            bheads = partial.setdefault(branch, [])
            # Remove candidate heads that no longer are in the repo (e.g., as
            # the result of a strip that just happened). Avoid using 'node in
            # self' here because that dives down into branchcache code somewhat
            # recursively.
            bheadrevs = [self.changelog.rev(node) for node in bheads
                         if self.changelog.hasnode(node)]
            newheadrevs = [self.changelog.rev(node) for node in newnodes
                           if self.changelog.hasnode(node)]
            ctxisnew = bheadrevs and min(newheadrevs) > max(bheadrevs)
            # Remove duplicates - nodes that are in newheadrevs and are already
            # in bheadrevs. This can happen if you strip a node whose parent
            # was already a head (because they're on different branches).
            bheadrevs = sorted(set(bheadrevs).union(newheadrevs))

            # Starting from tip means fewer passes over reachable. If we know
            # the new candidates are not ancestors of existing heads, we don't
            # have to examine ancestors of existing heads
            if ctxisnew:
                iterrevs = sorted(newheadrevs)
            else:
                iterrevs = list(bheadrevs)

            # This loop prunes out two kinds of heads - heads that are
            # superseded by a head in newheadrevs, and newheadrevs that are not
            # heads because an existing head is their descendant.
            while iterrevs:
                latest = iterrevs.pop()
                if latest not in bheadrevs:
                    continue
                ancestors = set(self.changelog.ancestors([latest],
                                                         bheadrevs[0]))
                if ancestors:
                    bheadrevs = [b for b in bheadrevs if b not in ancestors]
            partial[branch] = [self.changelog.node(rev) for rev in bheadrevs]

        # There may be branches that cease to exist when the last commit in the
        # branch was stripped. This code filters them out. Note that the
        # branch that ceased to exist may not be in newbranches because
        # newbranches is the set of candidate heads, which when you strip the
        # last commit in a branch will be the parent branch.
        for branch in partial.keys():
            nodes = [head for head in partial[branch]
                     if self.changelog.hasnode(head)]
            if not nodes:
                del partial[branch]

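    # [Editorial note, not part of this changeset: a worked example of the
    # pruning above. With history 1 (branch a) -> 2 (branch b) -> 3 (branch a),
    # the candidate heads for branch a are {1, 3}; walking from the tip,
    # ancestors(3) contains 1, so 1 is pruned and branch a keeps the single
    # head 3.]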
    def lookup(self, key):
        return self[key].node()

    def lookupbranch(self, key, remote=None):
        repo = remote or self
        if key in repo.branchmap():
            return key

        repo = (remote and remote.local()) and remote or self
        return repo[key].branch()

    def known(self, nodes):
        nm = self.changelog.nodemap
        pc = self._phasecache
        result = []
        for n in nodes:
            r = nm.get(n)
            resp = not (r is None or pc.phase(self, r) >= phases.secret)
            result.append(resp)
        return result

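    # [Editorial note, not part of this changeset: known() backs the 'known'
    # wire command used during discovery; secret changesets are deliberately
    # reported as unknown so they never propagate on push:
    #
    #     repo.known([publicnode, secretnode, missingnode])
    #     # -> [True, False, False]
    # ]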
    def local(self):
        return self

    def cancopy(self):
        return self.local() # so statichttprepo's override of local() works

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)

    def changectx(self, changeid):
        return self[changeid]

    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()

    def setparents(self, p1, p2=nullid):
        copies = self.dirstate.setparents(p1, p2)
        if copies:
            # Adjust copy records, the dirstate cannot do it, it
            # requires access to parents manifests. Preserve them
            # only for entries added to first parent.
            pctx = self[p1]
            for f in copies:
                if f not in pctx and copies[f] in pctx:
                    self.dirstate.copy(copies[f], f)

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        return self.dirstate.getcwd()

    def pathto(self, f, cwd=None):
        return self.dirstate.pathto(f, cwd)

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def _link(self, f):
        return os.path.islink(self.wjoin(f))

    def _loadfilter(self, filter):
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]

    def _filter(self, filterpats, filename, data):
        for mf, fn, cmd in filterpats:
            if mf(filename):
                self.ui.debug("filtering %s through %s\n" % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data

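    # [Editorial note, not part of this changeset: the filter patterns come
    # from the [encode] and [decode] hgrc sections; a hypothetical setup
    # piping text files through external commands might look like:
    #
    #     [encode]
    #     **.txt = pipe: dos2unix
    #     [decode]
    #     **.txt = pipe: unix2dos
    #
    # wread() applies [encode] filters when reading from the working directory
    # and wwrite() applies [decode] filters when writing back out.]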
860 @propertycache
860 @propertycache
861 def _encodefilterpats(self):
861 def _encodefilterpats(self):
862 return self._loadfilter('encode')
862 return self._loadfilter('encode')
863
863
864 @propertycache
864 @propertycache
865 def _decodefilterpats(self):
865 def _decodefilterpats(self):
866 return self._loadfilter('decode')
866 return self._loadfilter('decode')
867
867
868 def adddatafilter(self, name, filter):
868 def adddatafilter(self, name, filter):
869 self._datafilters[name] = filter
869 self._datafilters[name] = filter
870
870
871 def wread(self, filename):
871 def wread(self, filename):
872 if self._link(filename):
872 if self._link(filename):
873 data = os.readlink(self.wjoin(filename))
873 data = os.readlink(self.wjoin(filename))
874 else:
874 else:
875 data = self.wopener.read(filename)
875 data = self.wopener.read(filename)
876 return self._filter(self._encodefilterpats, filename, data)
876 return self._filter(self._encodefilterpats, filename, data)
877
877
878 def wwrite(self, filename, data, flags):
878 def wwrite(self, filename, data, flags):
879 data = self._filter(self._decodefilterpats, filename, data)
879 data = self._filter(self._decodefilterpats, filename, data)
880 if 'l' in flags:
880 if 'l' in flags:
881 self.wopener.symlink(data, filename)
881 self.wopener.symlink(data, filename)
882 else:
882 else:
883 self.wopener.write(filename, data)
883 self.wopener.write(filename, data)
884 if 'x' in flags:
884 if 'x' in flags:
885 util.setflags(self.wjoin(filename), False, True)
885 util.setflags(self.wjoin(filename), False, True)
886
886
887 def wwritedata(self, filename, data):
887 def wwritedata(self, filename, data):
888 return self._filter(self._decodefilterpats, filename, data)
888 return self._filter(self._decodefilterpats, filename, data)
889
889
    def transaction(self, desc):
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(
                _("abandoned transaction found - run hg recover"))

        self._writejournal(desc)
        renames = [(x, undoname(x)) for x in self._journalfiles()]

        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        self._transref = weakref.ref(tr)
        return tr
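
    # Usage sketch (editor's addition, mirroring the pattern used by
    # commitctx() and pull() below): a caller takes the store lock, opens a
    # transaction, closes it on success, and releases it in all cases:
    #
    #     lock = repo.lock()
    #     tr = None
    #     try:
    #         tr = repo.transaction('example')
    #         ...  # write to the store
    #         tr.close()
    #     finally:
    #         if tr:
    #             tr.release()
    #         lock.release()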

    def _journalfiles(self):
        return (self.sjoin('journal'), self.join('journal.dirstate'),
                self.join('journal.branch'), self.join('journal.desc'),
                self.join('journal.bookmarks'),
                self.sjoin('journal.phaseroots'))

    def undofiles(self):
        return [undoname(x) for x in self._journalfiles()]

    def _writejournal(self, desc):
        self.opener.write("journal.dirstate",
                          self.opener.tryread("dirstate"))
        self.opener.write("journal.branch",
                          encoding.fromlocal(self.dirstate.branch()))
        self.opener.write("journal.desc",
                          "%d\n%s\n" % (len(self), desc))
        self.opener.write("journal.bookmarks",
                          self.opener.tryread("bookmarks"))
        self.sopener.write("journal.phaseroots",
                           self.sopener.tryread("phaseroots"))

    def recover(self):
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"),
                                     self.ui.warn)
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()

    def rollback(self, dryrun=False, force=False):
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                return self._rollback(dryrun, force)
            else:
                self.ui.warn(_("no rollback information available\n"))
                return 1
        finally:
            release(lock, wlock)

    def _rollback(self, dryrun, force):
        ui = self.ui
        try:
            args = self.opener.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
        if os.path.exists(self.join('undo.bookmarks')):
            util.rename(self.join('undo.bookmarks'),
                        self.join('bookmarks'))
        if os.path.exists(self.sjoin('undo.phaseroots')):
            util.rename(self.sjoin('undo.phaseroots'),
                        self.sjoin('phaseroots'))
        self.invalidate()

        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            util.rename(self.join('undo.dirstate'), self.join('dirstate'))
            try:
                branch = self.opener.read('undo.branch')
                self.dirstate.setbranch(branch)
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0

    def invalidatecaches(self):
        def delcache(name):
            try:
                delattr(self, name)
            except AttributeError:
                pass

        delcache('_tagscache')

        self._branchcache = None # in UTF-8
        self._branchcachetip = None

    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This differs from dirstate.invalidate() in that it doesn't always
        reread the dirstate. Use dirstate.invalidate() if you want to
        explicitly read the dirstate again (i.e. restoring it to a previous
        known good state).'''
        if 'dirstate' in self.__dict__:
            for k in self.dirstate._filecache:
                try:
                    delattr(self.dirstate, k)
                except AttributeError:
                    pass
            delattr(self, 'dirstate')

    def invalidate(self):
        for k in self._filecache:
            # dirstate is invalidated separately in invalidatedirstate()
            if k == 'dirstate':
                continue

            try:
                delattr(self, k)
            except AttributeError:
                pass
        self.invalidatecaches()

        # Discard all cache entries to force reloading everything.
        self._filecache.clear()
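
    # Note (editor's addition): three invalidation entry points coexist.
    # invalidatedirstate() above drops only the cached dirstate,
    # invalidatecaches() drops derived caches (tags, branch heads), and
    # invalidate() drops every other _filecache entry and then calls
    # invalidatecaches() itself.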

    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l

    def _afterlock(self, callback):
        """add a callback to the current repository lock.

        The callback will be executed on lock release."""
        l = self._lockref and self._lockref()
        if l:
            l.postrelease.append(callback)
        else:
            callback()

    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.'''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            l.lock()
            return l

        def unlock():
            self.store.write()
            if '_phasecache' in vars(self):
                self._phasecache.write()
            for k, ce in self._filecache.items():
                if k == 'dirstate':
                    continue
                ce.refresh()

        l = self._lock(self.sjoin("lock"), wait, unlock,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l

    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            l.lock()
            return l

        def unlock():
            self.dirstate.write()
            ce = self._filecache.get('dirstate')
            if ce:
                ce.refresh()

        l = self._lock(self.join("wlock"), wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
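
    # Lock-ordering sketch (editor's addition): when both locks are needed,
    # this file consistently takes the working-directory lock before the
    # store lock, as in rollback() above:
    #
    #     wlock = repo.wlock()
    #     lock = repo.lock()
    #     try:
    #         ...
    #     finally:
    #         release(lock, wlock)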

    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(" %s: searching for copy revision for %s\n" %
                              (fname, cfname))
                for ancestor in self[None].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestor = flog.ancestor(fparent1, fparent2)
            if fparentancestor == fparent1:
                fparent1, fparent2 = fparent2, nullid
            elif fparentancestor == fparent2:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1

    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.
        """

        def fail(f, msg):
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = matchmod.always(self.root, '')

        if not force:
            vdirs = []
            match.dir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if (not force and merge and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            changes = self.status(match=match, clean=force)
            if force:
                changes[0].extend(changes[6]) # mq may commit unchanged files

            # check subrepos
            subs = []
            commitsubs = set()
            newstate = wctx.substate.copy()
            # only manage subrepos and .hgsubstate if .hgsub is present
            if '.hgsub' in wctx:
                # we'll decide whether to track this ourselves, thanks
                if '.hgsubstate' in changes[0]:
                    changes[0].remove('.hgsubstate')
                if '.hgsubstate' in changes[2]:
                    changes[2].remove('.hgsubstate')

                # compare current state to last committed state
                # build new substate based on last committed state
                oldstate = wctx.p1().substate
                for s in sorted(newstate.keys()):
                    if not match(s):
                        # ignore working copy, use old state if present
                        if s in oldstate:
                            newstate[s] = oldstate[s]
                            continue
                        if not force:
                            raise util.Abort(
                                _("commit with new subrepo %s excluded") % s)
                    if wctx.sub(s).dirty(True):
                        if not self.ui.configbool('ui', 'commitsubrepos'):
                            raise util.Abort(
                                _("uncommitted changes in subrepo %s") % s,
                                hint=_("use --subrepos for recursive commit"))
                        subs.append(s)
                        commitsubs.add(s)
                    else:
                        bs = wctx.sub(s).basestate()
                        newstate[s] = (newstate[s][0], bs, newstate[s][2])
                        if oldstate.get(s, (None, None, None))[1] != bs:
                            subs.append(s)

                # check for removed subrepos
                for p in wctx.parents():
                    r = [s for s in p.substate if s not in newstate]
                    subs += [s for s in r if match(s)]
                if subs:
                    if (not match('.hgsub') and
                        '.hgsub' in (wctx.modified() + wctx.added())):
                        raise util.Abort(
                            _("can't commit subrepos without .hgsub"))
                    changes[0].insert(0, '.hgsubstate')

            elif '.hgsub' in changes[2]:
                # clean up .hgsubstate when .hgsub is removed
                if ('.hgsubstate' in wctx and
                    '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
                    changes[2].insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(changes[0] + changes[1] + changes[2])

                for f in match.files():
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in changes[3]: # missing
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            if (not force and not extra.get("close") and not merge
                and not (changes[0] or changes[1] or changes[2])
                and wctx.branch() == wctx.p1().branch()):
                return None

            if merge and changes[3]:
                raise util.Abort(_("cannot commit merge with missing files"))

            ms = mergemod.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg help resolve)"))

            cctx = context.workingctx(self, text, user, date, extra, changes)
            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # commit subs and write new state
            if subs:
                for s in sorted(commitsubs):
                    sub = wctx.sub(s)
                    self.ui.status(_('committing subrepository %s\n') %
                                   subrepo.subrelpath(sub))
                    sr = sub.commit(cctx._text, user, date)
                    newstate[s] = (newstate[s][0], sr)
                subrepo.writestate(self, newstate)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook). Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfn = self.savecommitmessage(cctx._text)

            p1, p2 = self.dirstate.parents()
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            try:
                self.hook("precommit", throw=True, parent1=hookp1,
                          parent2=hookp2)
                ret = self.commitctx(cctx, True)
            except: # re-raises
                if edited:
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update bookmarks, dirstate and mergestate
            bookmarks.update(self, [p1, p2], ret)
            for f in changes[0] + changes[1]:
                self.dirstate.normal(f)
            for f in changes[2]:
                self.dirstate.drop(f)
            self.dirstate.setparents(ret)
            ms.reset()
        finally:
            wlock.release()

        def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
            self.hook("commit", node=node, parent1=parent1, parent2=parent2)
        self._afterlock(commithook)
        return ret
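
    # Usage sketch (editor's addition): the common entry point is commit()
    # with just a message; the remaining arguments default sensibly:
    #
    #     node = repo.commit(text='example message')
    #     if node is None:
    #         ...  # nothing changed, no revision was created
    #
    # commitctx() below is the lower-level half that writes out a revision
    # described by an already-built context object.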

    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.
        """

        tr = lock = None
        removed = list(ctx.removed())
        p1, p2 = ctx.p1(), ctx.p2()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction("commit")
            trp = weakref.proxy(tr)

            if ctx.files():
                m1 = p1.manifest().copy()
                m2 = p2.manifest()

                # check in files
                new = {}
                changed = []
                linkrev = len(self)
                for f in sorted(ctx.modified() + ctx.added()):
                    self.ui.note(f + "\n")
                    try:
                        fctx = ctx[f]
                        new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                                  changed)
                        m1.set(f, fctx.flags())
                    except OSError, inst:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    except IOError, inst:
                        errcode = getattr(inst, 'errno', errno.ENOENT)
                        if error or errcode and errcode != errno.ENOENT:
                            self.ui.warn(_("trouble committing %s!\n") % f)
                            raise
                        else:
                            removed.append(f)

                # update manifest
                m1.update(new)
                removed = [f for f in sorted(removed) if f in m1 or f in m2]
                drop = [f for f in removed if f in m1]
                for f in drop:
                    del m1[f]
                mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                       p2.manifestnode(), (new, drop))
                files = changed + removed
            else:
                mn = p1.manifestnode()
                files = []

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, files, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            p = lambda: self.changelog.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            # set the new commit in its proper phase
            targetphase = phases.newcommitphase(self.ui)
            if targetphase:
                # retracting the boundary does not alter the parent
                # changesets; if a parent has a higher phase, the
                # resulting phase will be compliant anyway
                #
                # if the minimal phase was 0 we don't need to retract anything
                phases.retractboundary(self, targetphase, [n])
            tr.close()
            self.updatebranchcache()
            return n
        finally:
            if tr:
                tr.release()
            lock.release()

    def destroyed(self, newheadnodes=None):
        '''Inform the repository that nodes have been destroyed.
        Intended for use by strip and rollback, so there's a common
        place for anything that has to be done after destroying history.

        If you know the branch heads cache was up to date before nodes were
        removed and you also know the set of candidate new heads that may
        have resulted from the destruction, you can set newheadnodes. This
        will enable the code to update the branchheads cache, rather than
        having future code decide it's invalid and regenerating it from
        scratch.
        '''
        # If we have info, newheadnodes, on how to update the branch cache,
        # do it. Otherwise, since nodes were destroyed, the cache is stale
        # and this will be caught the next time it is read.
        if newheadnodes:
            tiprev = len(self) - 1
            ctxgen = (self[node] for node in newheadnodes
                      if self.changelog.hasnode(node))
            self._updatebranchcache(self._branchcache, ctxgen)
            self._writebranchcache(self._branchcache, self.changelog.tip(),
                                   tiprev)

        # Ensure the persistent tag cache is updated. Doing it now
        # means that the tag cache only has to worry about destroyed
        # heads immediately after a strip/rollback. That in turn
        # guarantees that "cachetip == currenttip" (comparing both rev
        # and node) always means no nodes have been added or destroyed.

        # XXX this is suboptimal when qrefresh'ing: we strip the current
        # head, refresh the tag cache, then immediately add a new head.
        # But I think doing it this way is necessary for the "instant
        # tag cache retrieval" case to work.
        self.invalidatecaches()

    def walk(self, match, node=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function
        '''
        return self[node].walk(match)

    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False,
               listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.
        """

        def mfmatches(ctx):
            mf = ctx.manifest().copy()
            if match.always():
                return mf
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2.rev() is None
        parentworking = working and ctx1 == self['.']
        match = match or matchmod.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in ctx1 and f not in ctx1.dirs():
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
            match.bad = bad

        if working: # we need to scan the working dir
            subrepos = []
            if '.hgsub' in self.dirstate:
                subrepos = ctx2.substate.keys()
            s = self.dirstate.status(match, subrepos, listignored,
                                     listclean, listunknown)
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in sorted(cmp):
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f])):
                        modified.append(f)
                    else:
                        fixup.append(f)

                # update dirstate for files that are actually clean
                if fixup:
                    if listclean:
                        clean += fixup

                    try:
                        # updating the dirstate is optional
                        # so we don't wait on the lock
                        wlock = self.wlock(False)
                        try:
                            for f in fixup:
                                self.dirstate.normal(f)
                        finally:
                            wlock.release()
                    except error.LockError:
                        pass

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            modified, added, clean = [], [], []
            withflags = mf1.withflags() | mf2.withflags()
            for fn in mf2:
                if fn in mf1:
                    if (fn not in deleted and
                        ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
                         (mf1[fn] != mf2[fn] and
                          (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                elif fn not in deleted:
                    added.append(fn)
            removed = mf1.keys()

        if working and modified and not self.dirstate._checklink:
            # Symlink placeholders may get non-symlink-like contents
            # via user error or dereferencing by NFS or Samba servers,
            # so we filter out any placeholders that don't look like a
            # symlink
            sane = []
            for f in modified:
                if ctx2.flags(f) == 'l':
                    d = ctx2[f].data()
                    if len(d) >= 1024 or '\n' in d or util.binary(d):
                        self.ui.debug('ignoring suspect symlink placeholder'
                                      ' "%s"\n' % f)
                        continue
                sane.append(f)
            modified = sane

        r = modified, added, removed, deleted, unknown, ignored, clean

        if listsubrepos:
            for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
                if working:
                    rev2 = None
                else:
                    rev2 = ctx2.substate[subpath][1]
                try:
                    submatch = matchmod.narrowmatcher(subpath, match)
                    s = sub.status(rev2, match=submatch, ignored=listignored,
                                   clean=listclean, unknown=listunknown,
                                   listsubrepos=True)
                    for rfiles, sfiles in zip(r, s):
                        rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
                except error.LookupError:
                    self.ui.status(_("skipping missing subrepository: %s\n")
                                   % subpath)

        for l in r:
            l.sort()
        return r
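
    # Usage sketch (editor's addition): the result unpacks as a 7-tuple in
    # this fixed order; the unknown/ignored/clean lists stay empty unless
    # the corresponding flag is passed:
    #
    #     modified, added, removed, deleted, unknown, ignored, clean = \
    #         repo.status(ignored=True, clean=True, unknown=True)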

    def heads(self, start=None):
        heads = self.changelog.heads(start)
        # sort the output in rev descending order
        return sorted(heads, key=self.changelog.rev, reverse=True)

    def branchheads(self, branch=None, start=None, closed=False):
        '''return a (possibly filtered) list of heads for the given branch

        Heads are returned in topological order, from newest to oldest.
        If branch is None, use the dirstate branch.
        If start is not None, return only heads reachable from start.
        If closed is True, return heads that are marked as closed as well.
        '''
        if branch is None:
            branch = self[None].branch()
        branches = self.branchmap()
        if branch not in branches:
            return []
        # the cache returns heads ordered lowest to highest
        bheads = list(reversed(branches[branch]))
        if start is not None:
            # filter out the heads that cannot be reached from startrev
            fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
            bheads = [h for h in bheads if h in fbheads]
        if not closed:
            bheads = [h for h in bheads if not self[h].closesbranch()]
        return bheads

    def branches(self, nodes):
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while True:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b
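
    # Note (editor's addition): between() below walks the first-parent chain
    # from each 'top' towards 'bottom' and samples nodes at exponentially
    # growing distances (1, 2, 4, 8, ...); the legacy discovery protocol is
    # understood to use these samples to bisect for common ancestors in a
    # logarithmic number of round trips.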

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom and n != nullid:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r
1723
1723
1724 def pull(self, remote, heads=None, force=False):
1724 def pull(self, remote, heads=None, force=False):
1725 # don't open transaction for nothing or you break future useful
1725 # don't open transaction for nothing or you break future useful
1726 # rollback call
1726 # rollback call
1727 tr = None
1727 tr = None
1728 trname = 'pull\n' + util.hidepassword(remote.url())
1728 trname = 'pull\n' + util.hidepassword(remote.url())
1729 lock = self.lock()
1729 lock = self.lock()
1730 try:
1730 try:
1731 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1731 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1732 force=force)
1732 force=force)
1733 common, fetch, rheads = tmp
            common, fetch, rheads = tmp
            if not fetch:
                self.ui.status(_("no changes found\n"))
                added = []
                result = 0
            else:
                tr = self.transaction(trname)
                if heads is None and list(common) == [nullid]:
                    self.ui.status(_("requesting all changes\n"))
                elif heads is None and remote.capable('changegroupsubset'):
                    # issue1320, avoid a race if remote changed after discovery
                    heads = rheads

                if remote.capable('getbundle'):
                    cg = remote.getbundle('pull', common=common,
                                          heads=heads or rheads)
                elif heads is None:
                    cg = remote.changegroup(fetch, 'pull')
                elif not remote.capable('changegroupsubset'):
                    raise util.Abort(_("partial pull cannot be done because "
                                       "other repository doesn't support "
                                       "changegroupsubset."))
                else:
                    cg = remote.changegroupsubset(fetch, heads, 'pull')
                clstart = len(self.changelog)
                result = self.addchangegroup(cg, 'pull', remote.url())
                clend = len(self.changelog)
                added = [self.changelog.node(r) for r in xrange(clstart, clend)]

            # compute target subset
            if heads is None:
                # We pulled everything possible,
                # sync on everything common
                subset = common + added
            else:
                # We pulled a specific subset,
                # sync on this subset
                subset = heads

            # Get remote phases data from remote
            remotephases = remote.listkeys('phases')
            publishing = bool(remotephases.get('publishing', False))
            if remotephases and not publishing:
                # remote is new and non-publishing
                pheads, _dr = phases.analyzeremotephases(self, subset,
                                                         remotephases)
                phases.advanceboundary(self, phases.public, pheads)
                phases.advanceboundary(self, phases.draft, subset)
            else:
                # Remote is old or publishing; all common changesets
                # should be seen as public
                phases.advanceboundary(self, phases.public, subset)

            remoteobs = remote.listkeys('obsolete')
            if 'dump' in remoteobs:
                if tr is None:
                    tr = self.transaction(trname)
                data = base85.b85decode(remoteobs['dump'])
                self.obsstore.mergemarkers(tr, data)
            if tr is not None:
                tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()

        return result
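
    # Editor's sketch (assumption, not part of this changeset): invoking
    # pull() from the API, with a hypothetical peer obtained via hg.peer():
    #
    #   from mercurial import hg, ui as uimod
    #   u = uimod.ui()
    #   repo = hg.repository(u, '.')
    #   other = hg.peer(u, {}, 'http://example.com/repo')
    #   repo.pull(other)                               # pull everything
    #   repo.pull(other, heads=[other.lookup('tip')])  # pull up to tip only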

    def checkpush(self, force, revs):
        """Extensions can override this function if additional checks have
        to be performed before pushing, or call it if they override the push
        command.
        """
        pass
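
    # Editor's sketch (assumption, not part of this changeset): an extension
    # wrapping checkpush to veto partial pushes, using the standard
    # extensions.wrapfunction mechanism:
    #
    #   from mercurial import extensions, localrepo, util
    #   def _checkpush(orig, repo, force, revs):
    #       if revs and not force:
    #           raise util.Abort('partial pushes are disabled here')
    #       return orig(repo, force, revs)
    #   extensions.wrapfunction(localrepo.localrepository, 'checkpush',
    #                           _checkpush)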

    def push(self, remote, force=False, revs=None, newbranch=False):
        '''Push outgoing changesets (limited by revs) from the current
        repository to remote. Return an integer:
          - None means nothing to push
          - 0 means HTTP error
          - 1 means we pushed and remote head count is unchanged *or*
            we have outgoing changesets but refused to push
          - other values as described by addchangegroup()
        '''
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        if not remote.canpush():
            raise util.Abort(_("destination does not support push"))
        # get local lock as we might write phase data
        locallock = self.lock()
        try:
            self.checkpush(force, revs)
            lock = None
            unbundle = remote.capable('unbundle')
            if not unbundle:
                lock = remote.lock()
            try:
                # discovery
                fci = discovery.findcommonincoming
                commoninc = fci(self, remote, force=force)
                common, inc, remoteheads = commoninc
                fco = discovery.findcommonoutgoing
                outgoing = fco(self, remote, onlyheads=revs,
                               commoninc=commoninc, force=force)

                if not outgoing.missing:
                    # nothing to push
                    scmutil.nochangesfound(self.ui, self, outgoing.excluded)
                    ret = None
                else:
                    # something to push
                    if not force:
                        # if self.obsstore is false --> no obsolete markers,
                        # so we can skip the iteration
                        if self.obsstore:
                            # these messages are defined here to stay within
                            # the 80 char limit
                            mso = _("push includes an obsolete changeset: %s!")
                            msu = _("push includes an unstable changeset: %s!")
                            # If we are pushing and there is at least one
                            # obsolete or unstable changeset in missing, at
                            # least one of the missing heads will be obsolete
                            # or unstable. So checking heads only is ok.
                            for node in outgoing.missingheads:
                                ctx = self[node]
                                if ctx.obsolete():
                                    raise util.Abort(mso % ctx)
                                elif ctx.unstable():
                                    raise util.Abort(msu % ctx)
                        discovery.checkheads(self, remote, outgoing,
                                             remoteheads, newbranch,
                                             bool(inc))

                    # create a changegroup from local
                    if revs is None and not outgoing.excluded:
                        # push everything,
                        # use the fast path, no race possible on push
                        cg = self._changegroup(outgoing.missing, 'push')
                    else:
                        cg = self.getlocalbundle('push', outgoing)

                    # apply changegroup to remote
                    if unbundle:
                        # local repo finds heads on server, finds out what
                        # revs it must push. once revs transferred, if server
                        # finds it has different heads (someone else won
                        # commit/push race), server aborts.
                        if force:
                            remoteheads = ['force']
                        # ssh: return remote's addchangegroup()
                        # http: return remote's addchangegroup() or 0 for error
                        ret = remote.unbundle(cg, remoteheads, 'push')
                    else:
                        # we return an integer indicating remote head count
                        # change
                        ret = remote.addchangegroup(cg, 'push', self.url())

                if ret:
                    # push succeeded, synchronize the target of the push
                    cheads = outgoing.missingheads
                elif revs is None:
                    # All-out push failed. Synchronize all common.
                    cheads = outgoing.commonheads
                else:
                    # I want cheads = heads(::missingheads and ::commonheads)
                    # (missingheads is revs with secret changesets filtered
                    # out)
                    #
                    # This can be expressed as:
                    #     cheads = ( (missingheads and ::commonheads)
                    #              + (commonheads and ::missingheads))
                    #
                    # while trying to push we already computed the following:
                    #     common = (::commonheads)
                    #     missing = ((commonheads::missingheads) - commonheads)
                    #
                    # We can pick:
                    # * missingheads that are part of common (::commonheads)
                    common = set(outgoing.common)
                    cheads = [node for node in revs if node in common]
                    # and
                    # * commonheads that are parents of missing
                    revset = self.set('%ln and parents(roots(%ln))',
                                      outgoing.commonheads,
                                      outgoing.missing)
                    cheads.extend(c.node() for c in revset)
                # even when we don't push, exchanging phase data is useful
                remotephases = remote.listkeys('phases')
                if not remotephases: # old server or public only repo
                    phases.advanceboundary(self, phases.public, cheads)
                    # don't push any phase data as there is nothing to push
                else:
                    ana = phases.analyzeremotephases(self, cheads, remotephases)
                    pheads, droots = ana
                    ### Apply remote phase on local
                    if remotephases.get('publishing', False):
                        phases.advanceboundary(self, phases.public, cheads)
                    else: # publish = False
                        phases.advanceboundary(self, phases.public, pheads)
                        phases.advanceboundary(self, phases.draft, cheads)
                    ### Apply local phase on remote

                    # Get the list of all revs that are draft on remote but
                    # public here.
                    # XXX Beware that the revset breaks if droots is not
                    # XXX strictly roots; we may want to ensure it is, but
                    # XXX that is costly.
                    outdated = self.set('heads((%ln::%ln) and public())',
                                        droots, cheads)
                    for newremotehead in outdated:
                        r = remote.pushkey('phases',
                                           newremotehead.hex(),
                                           str(phases.draft),
                                           str(phases.public))
                        if not r:
                            self.ui.warn(_('updating %s to public failed!\n')
                                         % newremotehead)
-               if ('obsolete' in remote.listkeys('namespaces')
-                   and self.obsstore):
+               if (self.obsstore and
+                   'obsolete' in remote.listkeys('namespaces')):
                    data = self.listkeys('obsolete')['dump']
                    r = remote.pushkey('obsolete', 'dump', '', data)
                    if not r:
                        self.ui.warn(_('failed to push obsolete markers!\n'))
            finally:
                if lock is not None:
                    lock.release()
        finally:
            locallock.release()

        self.ui.debug("checking for updated bookmarks\n")
        rb = remote.listkeys('bookmarks')
        for k in rb.keys():
            if k in self._bookmarks:
                nr, nl = rb[k], hex(self._bookmarks[k])
                if nr in self:
                    cr = self[nr]
                    cl = self[nl]
                    if cl in cr.descendants():
                        r = remote.pushkey('bookmarks', k, nr, nl)
                        if r:
                            self.ui.status(_("updating bookmark %s\n") % k)
                        else:
                            self.ui.warn(_('updating bookmark %s'
                                           ' failed!\n') % k)

        return ret
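
    # Editor's sketch (assumption, not part of this changeset): interpreting
    # push()'s return value as documented in its docstring:
    #
    #   ret = repo.push(other)
    #   if ret is None:
    #       pass        # nothing to push
    #   elif ret == 0:
    #       pass        # HTTP error while unbundling on the remote
    #   else:
    #       pass        # pushed; non-zero values encode the remote head
    #                   # count change (see addchangegroup())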

    def changegroupinfo(self, nodes, source):
        if self.ui.verbose or source == 'bundle':
            self.ui.status(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug("list of changesets:\n")
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))

    def changegroupsubset(self, bases, heads, source):
        """Compute a changegroup consisting of all the nodes that are
        descendants of any of the bases and ancestors of any of the heads.
        Return a chunkbuffer object whose read() method will return
        successive changegroup chunks.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.
        """
        cl = self.changelog
        if not bases:
            bases = [nullid]
        csets, bases, heads = cl.nodesbetween(bases, heads)
        # We assume that all ancestors of bases are known
        common = set(cl.ancestors([cl.rev(n) for n in bases]))
        return self._changegroupsubset(common, csets, heads, source)

    def getlocalbundle(self, source, outgoing):
        """Like getbundle, but taking a discovery.outgoing as an argument.

        This is only implemented for local repos and reuses potentially
        precomputed sets in outgoing."""
        if not outgoing.missing:
            return None
        return self._changegroupsubset(outgoing.common,
                                       outgoing.missing,
                                       outgoing.missingheads,
                                       source)

    def getbundle(self, source, heads=None, common=None):
        """Like changegroupsubset, but returns the set difference between the
        ancestors of heads and the ancestors of common.

        If heads is None, use the local heads. If common is None, use [nullid].

        The nodes in common might not all be known locally due to the way the
        current discovery protocol works.
        """
        cl = self.changelog
        if common:
            nm = cl.nodemap
            common = [n for n in common if n in nm]
        else:
            common = [nullid]
        if not heads:
            heads = cl.heads()
        return self.getlocalbundle(source,
                                   discovery.outgoing(cl, common, heads))
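
    # Editor's sketch (assumption, not part of this changeset): the set
    # computed by getbundle, in toy form, for a DAG given as a mapping of
    # node -> list of parent nodes:
    #
    #   def ancestors(parents, nodes):
    #       seen, stack = set(), list(nodes)
    #       while stack:
    #           n = stack.pop()
    #           if n not in seen:
    #               seen.add(n)
    #               stack.extend(parents.get(n, []))
    #       return seen
    #
    #   missing = ancestors(parents, heads) - ancestors(parents, common)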

    def _changegroupsubset(self, commonrevs, csets, heads, source):

        cl = self.changelog
        mf = self.manifest
        mfs = {} # needed manifests
        fnodes = {} # needed file nodes
        changedfiles = set()
        fstate = ['', {}]
        count = [0, 0]

        # can we go through the fast path?
        heads.sort()
        if heads == sorted(self.heads()):
            return self._changegroup(csets, source)

        # slow path
        self.hook('preoutgoing', throw=True, source=source)
        self.changegroupinfo(csets, source)

        # filter any nodes that claim to be part of the known set
        def prune(revlog, missing):
            rr, rl = revlog.rev, revlog.linkrev
            return [n for n in missing
                    if rl(rr(n)) not in commonrevs]

        progress = self.ui.progress
        _bundling = _('bundling')
        _changesets = _('changesets')
        _manifests = _('manifests')
        _files = _('files')

        def lookup(revlog, x):
            if revlog == cl:
                c = cl.read(x)
                changedfiles.update(c[3])
                mfs.setdefault(c[0], x)
                count[0] += 1
                progress(_bundling, count[0],
                         unit=_changesets, total=count[1])
                return x
            elif revlog == mf:
                clnode = mfs[x]
                mdata = mf.readfast(x)
                for f, n in mdata.iteritems():
                    if f in changedfiles:
                        fnodes[f].setdefault(n, clnode)
                count[0] += 1
                progress(_bundling, count[0],
                         unit=_manifests, total=count[1])
                return clnode
            else:
                progress(_bundling, count[0], item=fstate[0],
                         unit=_files, total=count[1])
                return fstate[1][x]

        bundler = changegroup.bundle10(lookup)
        reorder = self.ui.config('bundle', 'reorder', 'auto')
        if reorder == 'auto':
            reorder = None
        else:
            reorder = util.parsebool(reorder)

        def gengroup():
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            count[:] = [0, len(csets)]
            for chunk in cl.group(csets, bundler, reorder=reorder):
                yield chunk
            progress(_bundling, None)

            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            for f in changedfiles:
                fnodes[f] = {}
            count[:] = [0, len(mfs)]
            for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
                yield chunk
            progress(_bundling, None)

            mfs.clear()

            # Go through all our files in order sorted by name.
            count[:] = [0, len(changedfiles)]
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s")
                                     % fname)
                fstate[0] = fname
                fstate[1] = fnodes.pop(fname, {})

                nodelist = prune(filerevlog, fstate[1])
                if nodelist:
                    count[0] += 1
                    yield bundler.fileheader(fname)
                    for chunk in filerevlog.group(nodelist, bundler, reorder):
                        yield chunk

            # Signal that no more groups are left.
            yield bundler.close()
            progress(_bundling, None)

        if csets:
            self.hook('outgoing', node=hex(csets[0]), source=source)

        return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
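
    # Editor's note (assumption drawn from gengroup() above): the chunk
    # stream layout produced for a bundle is
    #
    #   changelog group
    #   manifest group
    #   for each changed file: fileheader(fname) + filelog group
    #   bundler.close()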

    def changegroup(self, basenodes, source):
        # to avoid a race we use changegroupsubset() (issue1320)
        return self.changegroupsubset(basenodes, self.heads(), source)

    def _changegroup(self, nodes, source):
        """Compute the changegroup of all nodes that we have that a recipient
        doesn't. Return a chunkbuffer object whose read() method will return
        successive changegroup chunks.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        nodes is the set of nodes to send"""

        cl = self.changelog
        mf = self.manifest
        mfs = {}
        changedfiles = set()
        fstate = ['']
        count = [0, 0]

        self.hook('preoutgoing', throw=True, source=source)
        self.changegroupinfo(nodes, source)

        revset = set([cl.rev(n) for n in nodes])

        def gennodelst(log):
            ln, llr = log.node, log.linkrev
            return [ln(r) for r in log if llr(r) in revset]

        progress = self.ui.progress
        _bundling = _('bundling')
        _changesets = _('changesets')
        _manifests = _('manifests')
        _files = _('files')

        def lookup(revlog, x):
            if revlog == cl:
                c = cl.read(x)
                changedfiles.update(c[3])
                mfs.setdefault(c[0], x)
                count[0] += 1
                progress(_bundling, count[0],
                         unit=_changesets, total=count[1])
                return x
            elif revlog == mf:
                count[0] += 1
                progress(_bundling, count[0],
                         unit=_manifests, total=count[1])
                return cl.node(revlog.linkrev(revlog.rev(x)))
            else:
                progress(_bundling, count[0], item=fstate[0],
                         total=count[1], unit=_files)
                return cl.node(revlog.linkrev(revlog.rev(x)))

        bundler = changegroup.bundle10(lookup)
        reorder = self.ui.config('bundle', 'reorder', 'auto')
        if reorder == 'auto':
            reorder = None
        else:
            reorder = util.parsebool(reorder)

        def gengroup():
            '''yield a sequence of changegroup chunks (strings)'''
            # construct a list of all changed files

            count[:] = [0, len(nodes)]
            for chunk in cl.group(nodes, bundler, reorder=reorder):
                yield chunk
            progress(_bundling, None)

            count[:] = [0, len(mfs)]
            for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
                yield chunk
            progress(_bundling, None)

            count[:] = [0, len(changedfiles)]
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s")
                                     % fname)
                fstate[0] = fname
                nodelist = gennodelst(filerevlog)
                if nodelist:
                    count[0] += 1
                    yield bundler.fileheader(fname)
                    for chunk in filerevlog.group(nodelist, bundler, reorder):
                        yield chunk
            yield bundler.close()
            progress(_bundling, None)

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')

    def addchangegroup(self, source, srctype, url, emptyok=False):
        """Add the changegroup returned by source.read() to this repo.
        srctype is a string like 'push', 'pull', or 'unbundle'. url is
        the URL of the repo where this changegroup is coming from.

        Return an integer summarizing the change to this repo:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return len(cl)

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0
        efiles = set()

        # write changelog data to temp files so concurrent readers will not
        # see an inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = cl.heads()

        tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
        try:
            trp = weakref.proxy(tr)
            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            clstart = len(cl)
            class prog(object):
                step = _('changesets')
                count = 1
                ui = self.ui
                total = None
                def __call__(self):
                    self.ui.progress(self.step, self.count, unit=_('chunks'),
                                     total=self.total)
                    self.count += 1
            pr = prog()
            source.callback = pr

            source.changelogheader()
            srccontent = cl.addgroup(source, csmap, trp)
            if not (srccontent or emptyok):
                raise util.Abort(_("received changelog group is empty"))
            clend = len(cl)
            changesets = clend - clstart
            for c in xrange(clstart, clend):
                efiles.update(self[c].files())
            efiles = len(efiles)
            self.ui.progress(_('changesets'), None)

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            pr.step = _('manifests')
            pr.count = 1
            pr.total = changesets # manifests <= changesets
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            source.manifestheader()
            self.manifest.addgroup(source, revmap, trp)
            self.ui.progress(_('manifests'), None)

            needfiles = {}
            if self.ui.configbool('server', 'validate', default=False):
                # validate incoming csets have their manifests
                for cset in xrange(clstart, clend):
                    mfest = self.changelog.read(self.changelog.node(cset))[0]
                    mfest = self.manifest.readdelta(mfest)
                    # store file nodes we must see
                    for f, n in mfest.iteritems():
                        needfiles.setdefault(f, set()).add(n)

            # process the files
            self.ui.status(_("adding file changes\n"))
            pr.step = _('files')
            pr.count = 1
            pr.total = efiles
            source.callback = None

            while True:
                chunkdata = source.filelogheader()
                if not chunkdata:
                    break
                f = chunkdata["filename"]
                self.ui.debug("adding %s revisions\n" % f)
                pr()
                fl = self.file(f)
                o = len(fl)
                if not fl.addgroup(source, revmap, trp):
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += len(fl) - o
                files += 1
                if f in needfiles:
                    needs = needfiles[f]
                    for new in xrange(o, len(fl)):
                        n = fl.node(new)
                        if n in needs:
                            needs.remove(n)
                    if not needs:
                        del needfiles[f]
            self.ui.progress(_('files'), None)

            for f, needs in needfiles.iteritems():
                fl = self.file(f)
                for n in needs:
                    try:
                        fl.rev(n)
                    except error.LookupError:
                        raise util.Abort(
                            _('missing file data for %s:%s - run hg verify') %
                            (f, hex(n)))

            dh = 0
            if oldheads:
                heads = cl.heads()
                dh = len(heads) - len(oldheads)
                for h in heads:
                    if h not in oldheads and self[h].closesbranch():
                        dh -= 1
            htext = ""
            if dh:
                htext = _(" (%+d heads)") % dh

            self.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                           % (changesets, revisions, files, htext))

            if changesets > 0:
                p = lambda: cl.writepending() and self.root or ""
                self.hook('pretxnchangegroup', throw=True,
                          node=hex(cl.node(clstart)), source=srctype,
                          url=url, pending=p)

            added = [cl.node(r) for r in xrange(clstart, clend)]
            publishing = self.ui.configbool('phases', 'publish', True)
            if srctype == 'push':
                # Old servers can not push the boundary themselves.
                # New servers won't push the boundary if the changeset
                # already existed locally as secret
                #
                # We should not use added here but the list of all changes
                # in the bundle
                if publishing:
                    phases.advanceboundary(self, phases.public, srccontent)
                else:
                    phases.advanceboundary(self, phases.draft, srccontent)
                    phases.retractboundary(self, phases.draft, added)
            elif srctype != 'strip':
                # publishing only alters behavior during push
                #
                # strip should not touch the boundary at all
                phases.retractboundary(self, phases.draft, added)

            # make changelog see real files again
            cl.finalize(trp)

            tr.close()

            if changesets > 0:
                def runhooks():
                    # forcefully update the on-disk branch cache
                    self.ui.debug("updating the branch cache\n")
                    self.updatebranchcache()
                    self.hook("changegroup", node=hex(cl.node(clstart)),
                              source=srctype, url=url)

                    for n in added:
                        self.hook("incoming", node=hex(n), source=srctype,
                                  url=url)
                self._afterlock(runhooks)

        finally:
            tr.release()
        # never return 0 here:
        if dh < 0:
            return dh - 1
        else:
            return dh + 1
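
    # Editor's sketch (assumption, not part of this changeset): decoding the
    # integer documented in addchangegroup()'s docstring back into a head
    # delta:
    #
    #   def headdelta(ret):
    #       if ret == 0:
    #           return None          # nothing changed or no source
    #       return ret - 1 if ret > 0 else ret + 1
    #
    #   headdelta(1) == 0    # head count unchanged
    #   headdelta(3) == 2    # two heads added
    #   headdelta(-2) == -1  # one head removed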

    def stream_in(self, remote, requirements):
        lock = self.lock()
        try:
            fp = remote.stream_out()
            l = fp.readline()
            try:
                resp = int(l)
            except ValueError:
                raise error.ResponseError(
                    _('unexpected response from remote server:'), l)
            if resp == 1:
                raise util.Abort(_('operation forbidden by server'))
            elif resp == 2:
                raise util.Abort(_('locking the remote repository failed'))
            elif resp != 0:
                raise util.Abort(_('the server sent an unknown error code'))
            self.ui.status(_('streaming all changes\n'))
            l = fp.readline()
            try:
                total_files, total_bytes = map(int, l.split(' ', 1))
            except (ValueError, TypeError):
                raise error.ResponseError(
                    _('unexpected response from remote server:'), l)
            self.ui.status(_('%d files to transfer, %s of data\n') %
                           (total_files, util.bytecount(total_bytes)))
            handled_bytes = 0
            self.ui.progress(_('clone'), 0, total=total_bytes)
            start = time.time()
            for i in xrange(total_files):
                # XXX doesn't support '\n' or '\r' in filenames
                l = fp.readline()
                try:
                    name, size = l.split('\0', 1)
                    size = int(size)
                except (ValueError, TypeError):
                    raise error.ResponseError(
                        _('unexpected response from remote server:'), l)
                if self.ui.debugflag:
                    self.ui.debug('adding %s (%s)\n' %
                                  (name, util.bytecount(size)))
                # for backwards compat, name was partially encoded
                ofp = self.sopener(store.decodedir(name), 'w')
                for chunk in util.filechunkiter(fp, limit=size):
                    handled_bytes += len(chunk)
                    self.ui.progress(_('clone'), handled_bytes,
                                     total=total_bytes)
                    ofp.write(chunk)
                ofp.close()
            elapsed = time.time() - start
            if elapsed <= 0:
                elapsed = 0.001
            self.ui.progress(_('clone'), None)
            self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                           (util.bytecount(total_bytes), elapsed,
                            util.bytecount(total_bytes / elapsed)))

            # new requirements = old non-format requirements +
            #                    new format-related requirements from the
            #                    streamed-in repository
            requirements.update(set(self.requirements) - self.supportedformats)
            self._applyrequirements(requirements)
            self._writerequirements()

            self.invalidate()
            return len(self.heads()) + 1
        finally:
            lock.release()
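
    # Editor's note (assumption inferred from stream_in() above): the wire
    # format consumed by this method is
    #
    #   <resp>\n                       0 = ok, 1 = forbidden, 2 = lock failed
    #   <total files> <total bytes>\n
    #   then, per file:
    #       <store path>\0<size>\n
    #       <size> raw bytes of revlog data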

    def clone(self, remote, heads=[], stream=False):
        '''clone remote repository.

        keyword arguments:
        heads: list of revs to clone (forces use of pull)
        stream: use streaming clone if possible'''

        # now, all clients that can request uncompressed clones can
        # read repo formats supported by all servers that can serve
        # them.

        # if revlog format changes, client will have to check version
        # and format flags on "stream" capability, and use
        # uncompressed only if compatible.

        if not stream:
            # if the server explicitly prefers to stream (for fast LANs)
            stream = remote.capable('stream-preferred')

        if stream and not heads:
            # 'stream' means remote revlog format is revlogv1 only
            if remote.capable('stream'):
                return self.stream_in(remote, set(('revlogv1',)))
            # otherwise, 'streamreqs' contains the remote revlog format
            streamreqs = remote.capable('streamreqs')
            if streamreqs:
                streamreqs = set(streamreqs.split(','))
                # if we support it, stream in and adjust our requirements
                if not streamreqs - self.supportedformats:
                    return self.stream_in(remote, streamreqs)
        return self.pull(remote, heads)
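
    # Editor's sketch (assumption, not part of this changeset): typical
    # call sites for clone():
    #
    #   repo.clone(other)                  # stream if the server prefers it
    #   repo.clone(other, stream=True)     # request a streaming clone
    #   repo.clone(other, heads=[node])    # partial clone forces pull()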

    def pushkey(self, namespace, key, old, new):
        self.hook('prepushkey', throw=True, namespace=namespace, key=key,
                  old=old, new=new)
        ret = pushkey.push(self, namespace, key, old, new)
        self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
                  ret=ret)
        return ret

    def listkeys(self, namespace):
        self.hook('prelistkeys', throw=True, namespace=namespace)
        values = pushkey.list(self, namespace)
        self.hook('listkeys', namespace=namespace, values=values)
        return values

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        '''used to test argument passing over the wire'''
        return "%s %s %s %s %s" % (one, two, three, four, five)

    def savecommitmessage(self, text):
        fp = self.opener('last-message.txt', 'wb')
        try:
            fp.write(text)
        finally:
            fp.close()
        return self.pathto(fp.name[len(self.root)+1:])

# used to avoid circular references so destructors work
def aftertrans(files):
    renamefiles = [tuple(t) for t in files]
    def a():
        for src, dest in renamefiles:
            try:
                util.rename(src, dest)
            except OSError: # journal file does not yet exist
                pass
    return a

def undoname(fn):
    base, name = os.path.split(fn)
    assert name.startswith('journal')
    return os.path.join(base, name.replace('journal', 'undo', 1))

def instance(ui, path, create):
    return localrepository(ui, util.urllocalpath(path), create)

def islocal(path):
    return True
@@ -1,646 +1,645
1 commit hooks can see env vars
1 commit hooks can see env vars
2
2
3 $ hg init a
3 $ hg init a
4 $ cd a
4 $ cd a
5 $ cat > .hg/hgrc <<EOF
5 $ cat > .hg/hgrc <<EOF
6 > [hooks]
6 > [hooks]
7 > commit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit"
7 > commit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit"
8 > commit.b = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit.b"
8 > commit.b = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit.b"
9 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= python \"$TESTDIR/printenv.py\" precommit"
9 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= python \"$TESTDIR/printenv.py\" precommit"
10 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" pretxncommit"
10 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" pretxncommit"
11 > pretxncommit.tip = hg -q tip
11 > pretxncommit.tip = hg -q tip
12 > pre-identify = python "$TESTDIR/printenv.py" pre-identify 1
12 > pre-identify = python "$TESTDIR/printenv.py" pre-identify 1
13 > pre-cat = python "$TESTDIR/printenv.py" pre-cat
13 > pre-cat = python "$TESTDIR/printenv.py" pre-cat
14 > post-cat = python "$TESTDIR/printenv.py" post-cat
14 > post-cat = python "$TESTDIR/printenv.py" post-cat
15 > EOF
15 > EOF
16 $ echo a > a
16 $ echo a > a
17 $ hg add a
17 $ hg add a
18 $ hg commit -m a
18 $ hg commit -m a
19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
21 0:cb9a9f314b8b
21 0:cb9a9f314b8b
22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
24
24
  $ hg clone . ../b
  updating to branch default
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd ../b

changegroup hooks can see env vars

  $ cat > .hg/hgrc <<EOF
  > [hooks]
  > prechangegroup = python "$TESTDIR/printenv.py" prechangegroup
  > changegroup = python "$TESTDIR/printenv.py" changegroup
  > incoming = python "$TESTDIR/printenv.py" incoming
  > EOF

pretxncommit and commit hooks can see both parents of merge

  $ cd ../a
  $ echo b >> a
  $ hg commit -m a1 -d "1 0"
  precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
  pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
  1:ab228980c14d
  commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
  commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
  $ hg update -C 0
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ echo b > b
  $ hg add b
  $ hg commit -m b -d '1 0'
  precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
  pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
  2:ee9deb46ab31
  commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
  commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
  created new head
  $ hg merge 1
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  (branch merge, don't forget to commit)
  $ hg commit -m merge -d '2 0'
  precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
  pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
  3:07f3376c1e65
  commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
  commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd

test generic hooks

  $ hg id
  pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
  warning: pre-identify hook exited with status 1
  [1]
  $ hg cat b
  pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
  b
  post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0

  $ cd ../b
  $ hg pull ../a
  pulling from ../a
  searching for changes
  prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
  adding changesets
  adding manifests
  adding file changes
  added 3 changesets with 2 changes to 2 files
  changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
  incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
  incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
  incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
  (run 'hg update' to get a working copy)

tag hooks can see env vars

  $ cd ../a
  $ cat >> .hg/hgrc <<EOF
  > pretag = python "$TESTDIR/printenv.py" pretag
  > tag = sh -c "HG_PARENT1= HG_PARENT2= python \"$TESTDIR/printenv.py\" tag"
  > EOF
  $ hg tag -d '3 0' a
  pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
  precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
  pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
  4:539e4b31b6dc
  tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
  commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
  commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
  $ hg tag -l la
  pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
  tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la

pretag hook can forbid tagging

  $ echo "pretag.forbid = python \"$TESTDIR/printenv.py\" pretag.forbid 1" >> .hg/hgrc
  $ hg tag -d '4 0' fa
  pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
  pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
  abort: pretag.forbid hook exited with status 1
  [255]
  $ hg tag -l fla
  pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
  pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
  abort: pretag.forbid hook exited with status 1
  [255]

pretxncommit hook can see the changeset and can roll back the txn; the
changeset is no longer there afterwards

  $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
  $ echo "pretxncommit.forbid1 = python \"$TESTDIR/printenv.py\" pretxncommit.forbid 1" >> .hg/hgrc
  $ echo z > z
  $ hg add z
  $ hg -q tip
  4:539e4b31b6dc
  $ hg commit -m 'fail' -d '4 0'
  precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
  pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
  5:6f611f8018c1
  5:6f611f8018c1
  pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
  transaction abort!
  rollback completed
  abort: pretxncommit.forbid1 hook exited with status 1
  [255]
  $ hg -q tip
  4:539e4b31b6dc

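in-process python hooks receive the same information as keyword arguments
rather than environment variables; a hypothetical pretxncommit hook that
vets the pending changeset could look like this sketch (illustration only,
not part of the test):

    # sketch of an in-process pretxncommit hook; 'node' names the pending
    # changeset, which is already reachable through repo[] inside the txn
    def vetmessage(ui, repo, hooktype, node, **kwargs):
        ctx = repo[node]
        if 'fail' in ctx.description():
            ui.warn('rejecting %s\n' % ctx)
            return True  # a true return value fails the hook; the txn rolls back
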
precommit hook can prevent commit

  $ echo "precommit.forbid = python \"$TESTDIR/printenv.py\" precommit.forbid 1" >> .hg/hgrc
  $ hg commit -m 'fail' -d '4 0'
  precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
  precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
  abort: precommit.forbid hook exited with status 1
  [255]
  $ hg -q tip
  4:539e4b31b6dc

preupdate hook can prevent update

  $ echo "preupdate = python \"$TESTDIR/printenv.py\" preupdate" >> .hg/hgrc
  $ hg update 1
  preupdate hook: HG_PARENT1=ab228980c14d
  0 files updated, 0 files merged, 2 files removed, 0 files unresolved

update hook

  $ echo "update = python \"$TESTDIR/printenv.py\" update" >> .hg/hgrc
  $ hg update
  preupdate hook: HG_PARENT1=539e4b31b6dc
  update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved

pushkey hook

  $ echo "pushkey = python \"$TESTDIR/printenv.py\" pushkey" >> .hg/hgrc
  $ cd ../b
  $ hg bookmark -r null foo
  $ hg push -B foo ../a
  pushing to ../a
  searching for changes
  no changes found
  exporting bookmark foo
  pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
  [1]
  $ cd ../a

listkeys hook

  $ echo "listkeys = python \"$TESTDIR/printenv.py\" listkeys" >> .hg/hgrc
  $ hg bookmark -r null bar
  $ cd ../b
  $ hg pull -B bar ../a
  pulling from ../a
  listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
  no changes found
  listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
  listkeys hook: HG_NAMESPACE=obsolete HG_VALUES={}
  listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
  adding remote bookmark bar
  importing bookmark bar
  $ cd ../a

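the listkeys hook fires once per pushkey namespace the client enumerates
(bookmarks, phases, obsolete). the same data is reachable through the API;
a sketch, assuming a checkout of repo a at the relative path used above:

    # sketch: enumerate a pushkey namespace directly
    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '../a')
    for key, value in sorted(repo.listkeys('bookmarks').items()):
        print '%s -> %s' % (key, value)
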
test that prepushkey can prevent incoming keys

  $ echo "prepushkey = python \"$TESTDIR/printenv.py\" prepushkey.forbid 1" >> .hg/hgrc
  $ cd ../b
  $ hg bookmark -r null baz
  $ hg push -B baz ../a
  pushing to ../a
  searching for changes
  no changes found
  listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
  listkeys hook: HG_NAMESPACE=namespaces HG_VALUES={'bookmarks': '', 'namespaces': '', 'obsolete': '', 'phases': ''}
  listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
  listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
  exporting bookmark baz
  prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
  abort: prepushkey hook exited with status 1
  [255]
  $ cd ../a

test that prelistkeys can prevent listing keys

  $ echo "prelistkeys = python \"$TESTDIR/printenv.py\" prelistkeys.forbid 1" >> .hg/hgrc
  $ hg bookmark -r null quux
  $ cd ../b
  $ hg pull -B quux ../a
  pulling from ../a
  prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
  abort: prelistkeys hook exited with status 1
  [255]
  $ cd ../a

prechangegroup hook can prevent incoming changes

  $ cd ../b
  $ hg -q tip
  3:07f3376c1e65
  $ cat > .hg/hgrc <<EOF
  > [hooks]
  > prechangegroup.forbid = python "$TESTDIR/printenv.py" prechangegroup.forbid 1
  > EOF
  $ hg pull ../a
  pulling from ../a
  searching for changes
  prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
  abort: prechangegroup.forbid hook exited with status 1
  [255]

pretxnchangegroup hook can see incoming changes and can roll back the txn;
the incoming changes are no longer there afterwards

  $ cat > .hg/hgrc <<EOF
  > [hooks]
  > pretxnchangegroup.forbid0 = hg tip -q
  > pretxnchangegroup.forbid1 = python "$TESTDIR/printenv.py" pretxnchangegroup.forbid 1
  > EOF
  $ hg pull ../a
  pulling from ../a
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  4:539e4b31b6dc
  pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
  transaction abort!
  rollback completed
  abort: pretxnchangegroup.forbid1 hook exited with status 1
  [255]
  $ hg -q tip
  3:07f3376c1e65

outgoing hooks can see env vars

  $ rm .hg/hgrc
  $ cat > ../a/.hg/hgrc <<EOF
  > [hooks]
  > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
  > outgoing = python "$TESTDIR/printenv.py" outgoing
  > EOF
  $ hg pull ../a
  pulling from ../a
  searching for changes
  preoutgoing hook: HG_SOURCE=pull
  adding changesets
  outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  adding remote bookmark quux
  (run 'hg update' to get a working copy)
  $ hg rollback
  repository tip rolled back to revision 3 (undo pull)

preoutgoing hook can prevent outgoing changes

  $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  preoutgoing hook: HG_SOURCE=pull
  preoutgoing.forbid hook: HG_SOURCE=pull
  abort: preoutgoing.forbid hook exited with status 1
  [255]

outgoing hooks work for local clones

  $ cd ..
  $ cat > a/.hg/hgrc <<EOF
  > [hooks]
  > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
  > outgoing = python "$TESTDIR/printenv.py" outgoing
  > EOF
  $ hg clone a c
  preoutgoing hook: HG_SOURCE=clone
  outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
  updating to branch default
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ rm -rf c

preoutgoing hook can prevent outgoing changes for local clones

  $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> a/.hg/hgrc
  $ hg clone a zzz
  preoutgoing hook: HG_SOURCE=clone
  preoutgoing.forbid hook: HG_SOURCE=clone
  abort: preoutgoing.forbid hook exited with status 1
  [255]

  $ cd "$TESTTMP/b"

  $ cat > hooktests.py <<EOF
  > from mercurial import util
  >
  > uncallable = 0
  >
  > def printargs(args):
  >     args.pop('ui', None)
  >     args.pop('repo', None)
  >     a = list(args.items())
  >     a.sort()
  >     print 'hook args:'
  >     for k, v in a:
  >         print ' ', k, v
  >
  > def passhook(**args):
  >     printargs(args)
  >
  > def failhook(**args):
  >     printargs(args)
  >     return True
  >
  > class LocalException(Exception):
  >     pass
  >
  > def raisehook(**args):
  >     raise LocalException('exception from hook')
  >
  > def aborthook(**args):
  >     raise util.Abort('raise abort from hook')
  >
  > def brokenhook(**args):
  >     return 1 + {}
  >
  > def verbosehook(ui, **args):
  >     ui.note('verbose output from hook\n')
  >
  > def printtags(ui, repo, **args):
  >     print repo.tags().keys()
  >
  > class container:
  >     unreachable = 1
  > EOF

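the battery of tests below exercises how a python: hook's outcome is
reported: a true return value fails the hook, util.Abort aborts cleanly,
and any other exception is reported as "raised an exception". a simplified,
self-contained model of that classification (a toy, not mercurial.hook
itself):

    # toy model of python-hook outcome handling (assumed semantics)
    class Abort(Exception):
        pass

    def classify(fn):
        try:
            r = fn()
        except Abort, e:
            return 'hook failed: %s' % e
        except Exception, e:
            return 'hook raised an exception: %s' % e
        return r and 'hook failed' or 'hook passed'

    print classify(lambda: True)      # hook failed
    print classify(lambda: None)      # hook passed
    print classify(lambda: 1 + {})    # hook raised an exception: ...
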
test python hooks

#if windows
  $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
#else
  $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
#endif
  $ export PYTHONPATH

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
  $ hg pull ../a 2>&1 | grep 'raised an exception'
  error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
  $ hg pull ../a 2>&1 | grep 'raised an exception'
  error: preoutgoing.raise hook raised an exception: exception from hook

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  error: preoutgoing.abort hook failed: raise abort from hook
  abort: raise abort from hook
  [255]

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  hook args:
    hooktype preoutgoing
    source pull
  abort: preoutgoing.fail hook failed
  [255]

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
  [255]

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
  [255]

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
  [255]

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
  [255]

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
  [255]

  $ echo '[hooks]' > ../a/.hg/hgrc
  $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
  $ hg pull ../a
  pulling from ../a
  searching for changes
  hook args:
    hooktype preoutgoing
    source pull
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  adding remote bookmark quux
  (run 'hg update' to get a working copy)

make sure --traceback works

  $ echo '[hooks]' > .hg/hgrc
  $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc

  $ echo aa > a
  $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
  Traceback (most recent call last):

  $ cd ..
  $ hg init c
  $ cd c

  $ cat > hookext.py <<EOF
  > def autohook(**args):
  >     print "Automatically installed hook"
  >
  > def reposetup(ui, repo):
  >     repo.ui.setconfig("hooks", "commit.auto", autohook)
  > EOF
  $ echo '[extensions]' >> .hg/hgrc
  $ echo 'hookext = hookext.py' >> .hg/hgrc

  $ touch foo
  $ hg add foo
  $ hg ci -d '0 0' -m 'add foo'
  Automatically installed hook
  $ echo >> foo
  $ hg ci --debug -d '0 0' -m 'change foo'
  foo
  calling hook commit.auto: <function autohook at *> (glob)
  Automatically installed hook
  committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708

  $ hg showconfig hooks
  hooks.commit.auto=<function autohook at *> (glob)

test python hook configured with python:[file]:[hook] syntax

  $ cd ..
  $ mkdir d
  $ cd d
  $ hg init repo
  $ mkdir hooks

  $ cd hooks
  $ cat > testhooks.py <<EOF
  > def testhook(**args):
  >     print 'hook works'
  > EOF
  $ echo '[hooks]' > ../repo/.hg/hgrc
  $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc

  $ cd ../repo
  $ hg commit -d '0 0'
  hook works
  nothing changed
  [1]

  $ echo '[hooks]' > .hg/hgrc
  $ echo "update.ne = python:`pwd`/nonexisting.py:testhook" >> .hg/hgrc
  $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc

  $ hg up null
  loading update.ne hook failed:
  abort: No such file or directory: $TESTTMP/d/repo/nonexisting.py
  [255]

  $ hg id
  loading pre-identify.npmd hook failed:
  abort: No module named repo!
  [255]

  $ cd ../../b

make sure --traceback works on hook import failure

  $ cat > importfail.py <<EOF
  > import somebogusmodule
  > # dereference something in the module to force demandimport to load it
  > somebogusmodule.whatever
  > EOF

  $ echo '[hooks]' > .hg/hgrc
  $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc

  $ echo a >> a
  $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| {4}[a-zA-Z(])'
  exception from first failed import attempt:
  Traceback (most recent call last):
  ImportError: No module named somebogusmodule
  exception from second failed import attempt:
  Traceback (most recent call last):
  ImportError: No module named hgext_importfail
  Traceback (most recent call last):
  Abort: precommit.importfail hook is invalid (import of "importfail" failed)
  abort: precommit.importfail hook is invalid (import of "importfail" failed)

Issue1827: Hooks Update & Commit not completely post operation

commit and update hooks should run after command completion

  $ echo '[hooks]' > .hg/hgrc
  $ echo 'commit = hg id' >> .hg/hgrc
  $ echo 'update = hg id' >> .hg/hgrc
  $ echo bb > a
  $ hg ci -ma
  223eafe2750c tip
  $ hg up 0
  cb9a9f314b8b
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved

make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
that is passed to pre/post hooks

  $ echo '[hooks]' > .hg/hgrc
  $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
  $ hg id
  cb9a9f314b8b
  $ hg id --verbose
  calling hook pre-identify: hooktests.verbosehook
  verbose output from hook
  cb9a9f314b8b

Ensure hooks can be prioritized

  $ echo '[hooks]' > .hg/hgrc
  $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
  $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
  $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
  $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
  $ hg id --verbose
  calling hook pre-identify.b: hooktests.verbosehook
  verbose output from hook
  calling hook pre-identify.a: hooktests.verbosehook
  verbose output from hook
  calling hook pre-identify.c: hooktests.verbosehook
  verbose output from hook
  cb9a9f314b8b

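the ordering above suggests higher priority.* values run earlier and ties
keep configuration order (with priority defaulting to 0). a toy model of
that sort, under those assumptions:

    # model: sort hooks by descending priority; Python's sort is stable,
    # so equal-priority hooks keep their configuration order
    hooks = [('pre-identify.a', 0), ('pre-identify.b', 1), ('pre-identify.c', 0)]
    for name, priority in sorted(hooks, key=lambda h: -h[1]):
        print name    # b, then a, then c
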
new tags must be visible in pretxncommit (issue3210)

  $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
  $ hg tag -f foo
  ['a', 'foo', 'tip']

new commits must be visible in pretxnchangegroup (issue3428)

  $ cd ..
  $ hg init to
  $ echo '[hooks]' >> to/.hg/hgrc
  $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
  $ echo a >> to/a
  $ hg --cwd to ci -Ama
  adding a
  $ hg clone to from
  updating to branch default
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ echo aa >> from/a
  $ hg --cwd from ci -mb
  $ hg --cwd from push
  pushing to $TESTTMP/to (glob)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  changeset:   1:9836a07b9b9d
  tag:         tip
  user:        test
  date:        Thu Jan 01 00:00:00 1970 +0000
  summary:     b

@@ -1,453 +1,473
  $ cat >> $HGRCPATH << EOF
  > [extensions]
  > graphlog=
  > [phases]
  > # public changesets are not obsolete
  > publish=false
  > EOF
  $ mkcommit() {
  >    echo "$1" > "$1"
  >    hg add "$1"
  >    hg ci -m "add $1"
  > }
  $ getid() {
  >    hg id --debug -ir "desc('$1')"
  > }

  $ cat > debugkeys.py <<EOF
  > def reposetup(ui, repo):
  >     class debugkeysrepo(repo.__class__):
  >         def listkeys(self, namespace):
  >             ui.write('listkeys %s\n' % (namespace,))
  >             return super(debugkeysrepo, self).listkeys(namespace)
  >
  >     if repo.local():
  >         repo.__class__ = debugkeysrepo
  > EOF

  $ hg init tmpa
  $ cd tmpa

Killing a single changeset without replacement

  $ mkcommit kill_me
  $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
  $ hg debugobsolete
  97b7c2d76b1845ed3eb988cd612611e72406cef0 0 {'date': '0 0', 'user': 'babar'}
  $ cd ..

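each line of 'hg debugobsolete' output reads precursor, zero or more
successors, flags, then a metadata dict; no successor (as above) means the
changeset was pruned. a small self-contained sketch that splits such a
line, assuming that layout:

    # sketch: split one debugobsolete output line into its fields
    import ast

    def parsemarker(line):
        nodes, meta = line.split(' {', 1)
        parts = nodes.split()
        precursor, flags = parts[0], int(parts[-1])
        successors = parts[1:-1]              # empty for a pruned changeset
        metadata = ast.literal_eval('{' + meta)
        return precursor, successors, flags, metadata

    print parsemarker("97b7c2d76b1845ed3eb988cd612611e72406cef0 0 "
                      "{'date': '0 0', 'user': 'babar'}")
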
Killing a single changeset with replacement

  $ hg init tmpb
  $ cd tmpb
  $ mkcommit a
  $ mkcommit b
  $ mkcommit original_c
  $ hg up "desc('b')"
  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
  $ mkcommit new_c
  created new head
  $ hg debugobsolete `getid original_c` `getid new_c` -d '56 12'
  $ hg debugobsolete
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}

do it again (it reads the obsstore before adding the new changeset)

  $ hg up '.^'
  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
  $ mkcommit new_2_c
  created new head
  $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
  $ hg debugobsolete
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
  cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}

Register two markers with a missing node

  $ hg up '.^'
  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
  $ mkcommit new_3_c
  created new head
  $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
  $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
  $ hg debugobsolete
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
  cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
  ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
  1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}

Check that graphlog detects that a changeset is obsolete:

  $ hg glog
  @  changeset:   5:5601fb93a350
  |  tag:         tip
  |  parent:      1:7c3bad9141dc
  |  user:        test
  |  date:        Thu Jan 01 00:00:00 1970 +0000
  |  summary:     add new_3_c
  |
  o  changeset:   1:7c3bad9141dc
  |  user:        test
  |  date:        Thu Jan 01 00:00:00 1970 +0000
  |  summary:     add b
  |
  o  changeset:   0:1f0dee641bb7
     user:        test
     date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     add a


Check that public changesets are not counted as obsolete:

  $ hg phase --public 2
  $ hg --config 'extensions.graphlog=' glog
  @  changeset:   5:5601fb93a350
  |  tag:         tip
  |  parent:      1:7c3bad9141dc
  |  user:        test
  |  date:        Thu Jan 01 00:00:00 1970 +0000
  |  summary:     add new_3_c
  |
  | o  changeset:   2:245bde4270cd
  |/   user:        test
  |    date:        Thu Jan 01 00:00:00 1970 +0000
  |    summary:     add original_c
  |
  o  changeset:   1:7c3bad9141dc
  |  user:        test
  |  date:        Thu Jan 01 00:00:00 1970 +0000
  |  summary:     add b
  |
  o  changeset:   0:1f0dee641bb7
     user:        test
     date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     add a


  $ cd ..

Exchange Test
============================

Destination repo does not have any data
---------------------------------------

Try to pull markers
(extinct changesets are excluded but markers are exchanged)

  $ hg init tmpc
  $ cd tmpc
  $ hg pull ../tmpb
  pulling from ../tmpb
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 4 changesets with 4 changes to 4 files (+1 heads)
  (run 'hg heads' to see heads, 'hg merge' to merge)
  $ hg debugobsolete
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
  cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
  ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
  1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}

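the markers came along with the pull because the source advertises an
'obsolete' pushkey namespace. a sketch of peeking at that namespace through
the API (assumes a tmpc checkout in the current directory; the key names
themselves are an implementation detail):

    # sketch: markers travel through the 'obsolete' pushkey namespace
    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), 'tmpc')
    print sorted(repo.listkeys('obsolete').keys())
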
144 Rollback//Transaction support
154 Rollback//Transaction support
145
155
146 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
156 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
147 $ hg debugobsolete
157 $ hg debugobsolete
148 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
158 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
149 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
159 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
150 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
160 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
151 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
161 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
152 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 {'date': '1340 0', 'user': 'test'}
162 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 {'date': '1340 0', 'user': 'test'}
  $ hg rollback -n
  repository tip rolled back to revision 3 (undo debugobsolete)
  $ hg rollback
  repository tip rolled back to revision 3 (undo debugobsolete)
  $ hg debugobsolete
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
  cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
  ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
  1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}

  $ cd ..

Try to push markers

  $ hg init tmpd
  $ hg -R tmpb push tmpd
  pushing to tmpd
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 4 changesets with 4 changes to 4 files (+1 heads)
  $ hg -R tmpd debugobsolete
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
  cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
  ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
  1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}

Check obsolete keys are exchanged only if source has an obsolete store

  $ hg init empty
  $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
  pushing to tmpd
  no changes found
  listkeys phases
  listkeys bookmarks
  [1]

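Note that the push from `empty` only triggers `listkeys phases` and `listkeys bookmarks`: with no local obsolete store there is nothing to send, so the obsolete pushkey namespace is never queried. A minimal model of that guard, with illustrative names rather than the actual localrepo code:

    # Illustrative model of the push-time guard exercised above (not
    # Mercurial's real code): the obsolete namespace is touched only
    # when the local store actually holds markers.
    def keys_to_push(local_markers, remote_namespaces):
        """Return the pushkey namespaces a push would query."""
        keys = ['phases', 'bookmarks']
        if local_markers and 'obsolete' in remote_namespaces:
            keys.append('obsolete')
        return keys

    # The empty repo above has no markers, hence no 'listkeys obsolete':
    assert keys_to_push([], {'phases', 'bookmarks', 'obsolete'}) == \
        ['phases', 'bookmarks']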
clone support
(markers are copied and extinct changesets are included to allow hardlinks)

  $ hg clone tmpb clone-dest
  updating to branch default
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg -R clone-dest log -G --hidden
  @ changeset: 5:5601fb93a350
  | tag: tip
  | parent: 1:7c3bad9141dc
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add new_3_c
  |
  | x changeset: 4:ca819180edb9
  |/ parent: 1:7c3bad9141dc
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add new_2_c
  |
  | x changeset: 3:cdbce2fbb163
  |/ parent: 1:7c3bad9141dc
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add new_c
  |
  | o changeset: 2:245bde4270cd
  |/ user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add original_c
  |
  o changeset: 1:7c3bad9141dc
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add b
  |
  o changeset: 0:1f0dee641bb7
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add a

  $ hg -R clone-dest debugobsolete
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
  cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
  ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
  1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}


Destination repo has existing data
---------------------------------------

On pull

  $ hg init tmpe
  $ cd tmpe
  $ hg debugobsolete -d '1339 0' 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339
  $ hg pull ../tmpb
  pulling from ../tmpb
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 4 changesets with 4 changes to 4 files (+1 heads)
  (run 'hg heads' to see heads, 'hg merge' to merge)
  $ hg debugobsolete
  2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
  cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
  ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
  1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}


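Note that the pre-existing local marker (2448…) survives the pull: marker exchange is additive, effectively a union of the local and remote sets. A toy model of that merge (illustrative only, not Mercurial's obsstore code):

    # Toy model of additive marker merging on pull (illustrative only):
    # remote markers are appended, duplicates are dropped, nothing local
    # is ever removed.
    def merge_markers(local, remote):
        merged = list(local)
        seen = set(local)
        for marker in remote:
            if marker not in seen:
                seen.add(marker)
                merged.append(marker)
        return merged

    local = ['2448->1339']                 # the marker created locally above
    remote = ['245b->cdbc', 'cdbc->ca81', 'ca81->1337', '1337->5601']
    assert len(merge_markers(local, remote)) == 5  # union of both stores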
On push

  $ hg push ../tmpc
  pushing to ../tmpc
  searching for changes
  no changes found
  [1]
  $ hg -R ../tmpc debugobsolete
  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
  cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
  ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
  1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
  2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}

Detect outgoing obsolete and unstable changesets
---------------------------------------


  $ hg glog
  o changeset: 3:5601fb93a350
  | tag: tip
  | parent: 1:7c3bad9141dc
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add new_3_c
  |
  | o changeset: 2:245bde4270cd
  |/ user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add original_c
  |
  o changeset: 1:7c3bad9141dc
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add b
  |
  o changeset: 0:1f0dee641bb7
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add a

  $ hg up 'desc("new_3_c")'
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ mkcommit original_d
  $ mkcommit original_e
  $ hg debugobsolete `getid original_d` -d '0 0'
  $ hg log -r 'obsolete()'
  changeset: 4:7c694bff0650
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add original_d

  $ hg glog -r '::unstable()'
  @ changeset: 5:6e572121998e
  | tag: tip
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add original_e
  |
  x changeset: 4:7c694bff0650
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add original_d
  |
  o changeset: 3:5601fb93a350
  | parent: 1:7c3bad9141dc
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add new_3_c
  |
  o changeset: 1:7c3bad9141dc
  | user: test
  | date: Thu Jan 01 00:00:00 1970 +0000
  | summary: add b
  |
  o changeset: 0:1f0dee641bb7
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add a


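The revsets above follow from the marker graph: a changeset is obsolete when a marker rewrites (here, prunes) it, and unstable when it is itself alive but sits on top of an obsolete ancestor, as original_e sits on the pruned original_d. A small model of those predicates (illustrative, not Mercurial's implementation):

    # Toy DAG, child -> parent, mirroring the repository above; names
    # stand in for the real hashes.
    parents = {'original_e': 'original_d', 'original_d': 'new_3_c',
               'new_3_c': 'b', 'b': 'a', 'a': None}
    obsoleted = {'original_d'}  # precursors named in markers

    def is_obsolete(node):
        return node in obsoleted

    def is_unstable(node):
        # alive itself, but with an obsolete ancestor
        if is_obsolete(node):
            return False
        node = parents[node]
        while node is not None:
            if is_obsolete(node):
                return True
            node = parents[node]
        return False

    assert is_obsolete('original_d') and is_unstable('original_e')
    assert not is_unstable('new_3_c')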
Refuse to push an obsolete changeset

  $ hg push ../tmpc/ -r 'desc("original_d")'
  pushing to ../tmpc/
  searching for changes
  abort: push includes an obsolete changeset: 7c694bff0650!
  [255]

Refuse to push an unstable changeset

  $ hg push ../tmpc/
  pushing to ../tmpc/
  searching for changes
  abort: push includes an unstable changeset: 6e572121998e!
  [255]

Test that extinct changesets are properly detected

  $ hg log -r 'extinct()'

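`extinct()` is empty here: original_d is obsolete, but its descendant original_e is still alive, so the chain is not dead yet. A toy predicate capturing that rule (illustrative only):

    # Toy extinct check (illustrative): a node is extinct when it is
    # obsolete and every descendant is extinct too, i.e. the whole
    # subtree has been rewritten away.
    def is_extinct(node, children, is_obsolete):
        if not is_obsolete(node):
            return False
        return all(is_extinct(child, children, is_obsolete)
                   for child in children.get(node, ()))

    children = {'original_d': ['original_e']}
    # original_e is alive, so original_d is obsolete but not extinct:
    assert not is_extinct('original_d', children,
                          lambda n: n == 'original_d')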
Don't try to push extinct changesets

  $ hg init ../tmpf
  $ hg out ../tmpf
  comparing with ../tmpf
  searching for changes
  changeset: 0:1f0dee641bb7
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add a

  changeset: 1:7c3bad9141dc
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add b

  changeset: 2:245bde4270cd
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add original_c

  changeset: 3:5601fb93a350
  parent: 1:7c3bad9141dc
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add new_3_c

  changeset: 4:7c694bff0650
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add original_d

  changeset: 5:6e572121998e
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add original_e

  $ hg push ../tmpf -f # -f because we push unstable too
  pushing to ../tmpf
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 6 changesets with 6 changes to 6 files (+1 heads)

no warning displayed

  $ hg push ../tmpf
  pushing to ../tmpf
  searching for changes
  no changes found
  [1]
408 Do not warn about new head when the new head is a successors of a remote one
428 Do not warn about new head when the new head is a successors of a remote one
409
429
410 $ hg glog
430 $ hg glog
411 @ changeset: 5:6e572121998e
431 @ changeset: 5:6e572121998e
412 | tag: tip
432 | tag: tip
413 | user: test
433 | user: test
414 | date: Thu Jan 01 00:00:00 1970 +0000
434 | date: Thu Jan 01 00:00:00 1970 +0000
415 | summary: add original_e
435 | summary: add original_e
416 |
436 |
417 x changeset: 4:7c694bff0650
437 x changeset: 4:7c694bff0650
418 | user: test
438 | user: test
419 | date: Thu Jan 01 00:00:00 1970 +0000
439 | date: Thu Jan 01 00:00:00 1970 +0000
420 | summary: add original_d
440 | summary: add original_d
421 |
441 |
422 o changeset: 3:5601fb93a350
442 o changeset: 3:5601fb93a350
423 | parent: 1:7c3bad9141dc
443 | parent: 1:7c3bad9141dc
424 | user: test
444 | user: test
425 | date: Thu Jan 01 00:00:00 1970 +0000
445 | date: Thu Jan 01 00:00:00 1970 +0000
426 | summary: add new_3_c
446 | summary: add new_3_c
427 |
447 |
428 | o changeset: 2:245bde4270cd
448 | o changeset: 2:245bde4270cd
429 |/ user: test
449 |/ user: test
430 | date: Thu Jan 01 00:00:00 1970 +0000
450 | date: Thu Jan 01 00:00:00 1970 +0000
431 | summary: add original_c
451 | summary: add original_c
432 |
452 |
433 o changeset: 1:7c3bad9141dc
453 o changeset: 1:7c3bad9141dc
434 | user: test
454 | user: test
435 | date: Thu Jan 01 00:00:00 1970 +0000
455 | date: Thu Jan 01 00:00:00 1970 +0000
436 | summary: add b
456 | summary: add b
437 |
457 |
438 o changeset: 0:1f0dee641bb7
458 o changeset: 0:1f0dee641bb7
439 user: test
459 user: test
440 date: Thu Jan 01 00:00:00 1970 +0000
460 date: Thu Jan 01 00:00:00 1970 +0000
441 summary: add a
461 summary: add a
442
462
443 $ hg up -q 'desc(new_3_c)'
463 $ hg up -q 'desc(new_3_c)'
444 $ mkcommit obsolete_e
464 $ mkcommit obsolete_e
445 created new head
465 created new head
446 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
466 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
447 $ hg push ../tmpf
467 $ hg push ../tmpf
448 pushing to ../tmpf
468 pushing to ../tmpf
449 searching for changes
469 searching for changes
450 adding changesets
470 adding changesets
451 adding manifests
471 adding manifests
452 adding file changes
472 adding file changes
453 added 1 changesets with 1 changes to 1 files (+1 heads)
473 added 1 changesets with 1 changes to 1 files (+1 heads)
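
The push succeeds without a "new remote head" complaint because the marker created just before it declares obsolete_e a successor of original_e (6e572121998e), a head the remote already has: the new head is a replacement, not an addition. A rough model of that successor-aware head check (illustrative names, not the real discovery code):

    # Rough model of successor-aware head counting (illustrative names):
    # a pushed head is only "new" if it does not replace, via obsolescence
    # markers, a head the remote already has.
    def is_new_remote_head(candidate, remote_heads, successors):
        replaced = {succ
                    for old in remote_heads
                    for succ in successors.get(old, ())}
        return candidate not in replaced

    successors = {'6e572121998e': ['obsolete_e']}   # marker created above
    assert not is_new_remote_head('obsolete_e', ['6e572121998e'], successors)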