##// END OF EJS Templates
push: prevent moving phases outside of the pushed subset
Pierre-Yves David -
r15956:5653f2d1 stable
parent child Browse files
Show More
@@ -1,2281 +1,2313 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import scmutil, util, extensions, hook, error, revset
13 import scmutil, util, extensions, hook, error, revset
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, errno, os, time, inspect
18 import weakref, errno, os, time, inspect
# Convenience aliases for the caching decorators used throughout this class.
propertycache = util.propertycache  # caches a computed attribute on first access
filecache = scmutil.filecache       # like propertycache, but invalidated when the backing file changes
21
21
22 class localrepository(repo.repository):
22 class localrepository(repo.repository):
    # Wire-protocol capabilities advertised by this repository to peers.
    capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
                        'known', 'getbundle'))
    # On-disk revlog formats this client can read and write.
    supportedformats = set(('revlogv1', 'generaldelta'))
    # Full set of supported ".hg/requires" entries: revlog formats plus
    # store-layout flags.
    supported = supportedformats | set(('store', 'fncache', 'shared',
                                        'dotencode'))
28
28
    def __init__(self, baseui, path=None, create=False):
        """Open (or, when *create* is True, initialize) the repository at *path*.

        baseui: ui object whose configuration is copied for this repo.
        create: when True, lay out a fresh .hg directory; raises
                error.RepoError if one already exists.
        Raises error.RepoError when the repository is missing and
        create is False.
        """
        repo.repository.__init__(self)
        self.root = os.path.realpath(util.expandpath(path))
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.auditor = scmutil.pathauditor(self.root, self._checknested)
        self.opener = scmutil.opener(self.path)    # opener rooted at .hg/
        self.wopener = scmutil.opener(self.root)   # opener rooted at the working dir
        self.baseui = baseui
        self.ui = baseui.copy()
        self._dirtyphases = False
        # A list of callback to shape the phase if no data were found.
        # Callback are in the form: func(repo, roots) --> processed root.
        # This list it to be filled by extension during repo setup
        self._phasedefaults = []

        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # no .hg/hgrc: proceed with the inherited configuration only
            pass

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    util.makedirs(path)
                util.makedir(self.path, notindexed=True)
                requirements = ["revlogv1"]
                if self.ui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                    # fncache/dotencode only make sense on top of the store
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                # create an invalid changelog
                self.opener.append(
                    "00changelog.i",
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                    )
                if self.ui.configbool('format', 'generaldelta', False):
                    requirements.append("generaldelta")
                requirements = set(requirements)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            try:
                requirements = scmutil.readrequires(self.opener, self.supported)
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # missing "requires" file: treat as an old, format-less repo
                requirements = set()

        # honor .hg/sharedpath indirection (share extension)
        self.sharedpath = self.path
        try:
            s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
            if not os.path.exists(s):
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, scmutil.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()

        # branch-head cache state; populated lazily by updatebranchcache()
        self._branchcache = None
        self._branchcachetip = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}
117
117
118 def _applyrequirements(self, requirements):
118 def _applyrequirements(self, requirements):
119 self.requirements = requirements
119 self.requirements = requirements
120 openerreqs = set(('revlogv1', 'generaldelta'))
120 openerreqs = set(('revlogv1', 'generaldelta'))
121 self.sopener.options = dict((r, 1) for r in requirements
121 self.sopener.options = dict((r, 1) for r in requirements
122 if r in openerreqs)
122 if r in openerreqs)
123
123
124 def _writerequirements(self):
124 def _writerequirements(self):
125 reqfile = self.opener("requires", "w")
125 reqfile = self.opener("requires", "w")
126 for r in self.requirements:
126 for r in self.requirements:
127 reqfile.write("%s\n" % r)
127 reqfile.write("%s\n" % r)
128 reqfile.close()
128 reqfile.close()
129
129
    def _checknested(self, path):
        """Determine if path is a legal nested repository.

        *path* is an absolute filesystem path; returns True only when it
        lies under a subrepository declared in the working directory's
        .hgsub state (recursing into nested subrepos as needed).
        """
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    # the path is a subrepository itself
                    return True
                else:
                    # the path is inside a subrepo; delegate the check
                    # to that subrepo for the remainder of the path
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                # try a shorter prefix
                parts.pop()
        return False
167
167
    @filecache('bookmarks')
    def _bookmarks(self):
        # Bookmark name -> node map, reloaded when .hg/bookmarks changes.
        return bookmarks.read(self)
171
171
    @filecache('bookmarks.current')
    def _bookmarkcurrent(self):
        # Name of the active bookmark (or None), tracking .hg/bookmarks.current.
        return bookmarks.readcurrent(self)
175
175
    def _writebookmarks(self, marks):
        # NOTE(review): *marks* is unused here — bookmarks.write reads the
        # repository's in-memory bookmark state directly; kept for the
        # caller-facing signature.
        bookmarks.write(self)
178
178
    @filecache('phaseroots')
    def _phaseroots(self):
        # Load phase roots from .hg/store/phaseroots; loading fresh data
        # means any pending in-memory changes are gone, so clear the flag.
        self._dirtyphases = False
        phaseroots = phases.readroots(self)
        # drop roots that reference nodes unknown to this repo
        phases.filterunknown(self, phaseroots)
        return phaseroots
185
185
186 @propertycache
186 @propertycache
187 def _phaserev(self):
187 def _phaserev(self):
188 cache = [phases.public] * len(self)
188 cache = [phases.public] * len(self)
189 for phase in phases.trackedphases:
189 for phase in phases.trackedphases:
190 roots = map(self.changelog.rev, self._phaseroots[phase])
190 roots = map(self.changelog.rev, self._phaseroots[phase])
191 if roots:
191 if roots:
192 for rev in roots:
192 for rev in roots:
193 cache[rev] = phase
193 cache[rev] = phase
194 for rev in self.changelog.descendants(*roots):
194 for rev in self.changelog.descendants(*roots):
195 cache[rev] = phase
195 cache[rev] = phase
196 return cache
196 return cache
197
197
    @filecache('00changelog.i', True)
    def changelog(self):
        """The changelog revlog, with pending (uncommitted-transaction)
        data layered in when HG_PENDING points at this repo."""
        c = changelog.changelog(self.sopener)
        if 'HG_PENDING' in os.environ:
            p = os.environ['HG_PENDING']
            # only trust pending data written for this repository
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        return c
206
206
207 @filecache('00manifest.i', True)
207 @filecache('00manifest.i', True)
208 def manifest(self):
208 def manifest(self):
209 return manifest.manifest(self.sopener)
209 return manifest.manifest(self.sopener)
210
210
211 @filecache('dirstate')
211 @filecache('dirstate')
212 def dirstate(self):
212 def dirstate(self):
213 warned = [0]
213 warned = [0]
214 def validate(node):
214 def validate(node):
215 try:
215 try:
216 self.changelog.rev(node)
216 self.changelog.rev(node)
217 return node
217 return node
218 except error.LookupError:
218 except error.LookupError:
219 if not warned[0]:
219 if not warned[0]:
220 warned[0] = True
220 warned[0] = True
221 self.ui.warn(_("warning: ignoring unknown"
221 self.ui.warn(_("warning: ignoring unknown"
222 " working parent %s!\n") % short(node))
222 " working parent %s!\n") % short(node))
223 return nullid
223 return nullid
224
224
225 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
225 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
226
226
227 def __getitem__(self, changeid):
227 def __getitem__(self, changeid):
228 if changeid is None:
228 if changeid is None:
229 return context.workingctx(self)
229 return context.workingctx(self)
230 return context.changectx(self, changeid)
230 return context.changectx(self, changeid)
231
231
232 def __contains__(self, changeid):
232 def __contains__(self, changeid):
233 try:
233 try:
234 return bool(self.lookup(changeid))
234 return bool(self.lookup(changeid))
235 except error.RepoLookupError:
235 except error.RepoLookupError:
236 return False
236 return False
237
237
238 def __nonzero__(self):
238 def __nonzero__(self):
239 return True
239 return True
240
240
241 def __len__(self):
241 def __len__(self):
242 return len(self.changelog)
242 return len(self.changelog)
243
243
244 def __iter__(self):
244 def __iter__(self):
245 for i in xrange(len(self)):
245 for i in xrange(len(self)):
246 yield i
246 yield i
247
247
248 def revs(self, expr, *args):
248 def revs(self, expr, *args):
249 '''Return a list of revisions matching the given revset'''
249 '''Return a list of revisions matching the given revset'''
250 expr = revset.formatspec(expr, *args)
250 expr = revset.formatspec(expr, *args)
251 m = revset.match(None, expr)
251 m = revset.match(None, expr)
252 return [r for r in m(self, range(len(self)))]
252 return [r for r in m(self, range(len(self)))]
253
253
254 def set(self, expr, *args):
254 def set(self, expr, *args):
255 '''
255 '''
256 Yield a context for each matching revision, after doing arg
256 Yield a context for each matching revision, after doing arg
257 replacement via revset.formatspec
257 replacement via revset.formatspec
258 '''
258 '''
259 for r in self.revs(expr, *args):
259 for r in self.revs(expr, *args):
260 yield self[r]
260 yield self[r]
261
261
262 def url(self):
262 def url(self):
263 return 'file:' + self.root
263 return 'file:' + self.root
264
264
    def hook(self, name, throw=False, **args):
        # Thin delegation to hook.hook; when throw is True a failing hook
        # raises instead of just returning a status.
        return hook.hook(self.ui, self, name, throw, **args)
267
267
    # Characters that may never appear in a tag name (checked by _tag).
    tag_disallowed = ':\r\n'
269
269
    def _tag(self, names, node, message, local, user, date, extra={}):
        """Low-level tagging: write tag entries and (for global tags)
        commit the .hgtags change.

        names: a tag name or iterable of tag names
        node:  the node being tagged
        local: True -> write .hg/localtags; False -> edit and commit .hgtags
        extra: extra commit metadata (never mutated here; the shared
               mutable default is therefore harmless, if unidiomatic)
        Returns the node of the tag commit, or None for local tags.
        Raises util.Abort on a disallowed character in a tag name.
        """
        if isinstance(names, str):
            allchars = names
            names = (names,)
        else:
            allchars = ''.join(names)
        for c in self.tag_disallowed:
            if c in allchars:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        branches = self.branchmap()
        for name in names:
            # pretag hooks may veto the operation (throw=True)
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            # Append tag lines to fp; munge (optional) converts the tag
            # name encoding. prevtags is the file's existing content.
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
                    # record the previous target first so history of the
                    # tag is preserved in the file
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        self.invalidatecaches()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        # commit only the .hgtags file
        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
341
341
342 def tag(self, names, node, message, local, user, date):
342 def tag(self, names, node, message, local, user, date):
343 '''tag a revision with one or more symbolic names.
343 '''tag a revision with one or more symbolic names.
344
344
345 names is a list of strings or, when adding a single tag, names may be a
345 names is a list of strings or, when adding a single tag, names may be a
346 string.
346 string.
347
347
348 if local is True, the tags are stored in a per-repository file.
348 if local is True, the tags are stored in a per-repository file.
349 otherwise, they are stored in the .hgtags file, and a new
349 otherwise, they are stored in the .hgtags file, and a new
350 changeset is committed with the change.
350 changeset is committed with the change.
351
351
352 keyword arguments:
352 keyword arguments:
353
353
354 local: whether to store tags in non-version-controlled file
354 local: whether to store tags in non-version-controlled file
355 (default False)
355 (default False)
356
356
357 message: commit message to use if committing
357 message: commit message to use if committing
358
358
359 user: name of user to use if committing
359 user: name of user to use if committing
360
360
361 date: date tuple to use if committing'''
361 date: date tuple to use if committing'''
362
362
363 if not local:
363 if not local:
364 for x in self.status()[:5]:
364 for x in self.status()[:5]:
365 if '.hgtags' in x:
365 if '.hgtags' in x:
366 raise util.Abort(_('working copy of .hgtags is changed '
366 raise util.Abort(_('working copy of .hgtags is changed '
367 '(please commit .hgtags manually)'))
367 '(please commit .hgtags manually)'))
368
368
369 self.tags() # instantiate the cache
369 self.tags() # instantiate the cache
370 self._tag(names, node, message, local, user, date)
370 self._tag(names, node, message, local, user, date)
371
371
    @propertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                # Derived caches, filled lazily by tagslist()/nodetags().
                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache
393
393
    def tags(self):
        '''return a mapping of tag to node'''
        # delegate to the lazily-built tags cache
        return self._tagscache.tags
397
397
    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        alltags = {} # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                try:
                    # ignore tags to unknown nodes
                    self.changelog.lookup(node)
                    tags[encoding.tolocal(name)] = node
                except error.LookupError:
                    pass
        # 'tip' is implicit and always points at the newest changeset
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)
435
435
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)
446
446
447 def tagslist(self):
447 def tagslist(self):
448 '''return a list of tags ordered by revision'''
448 '''return a list of tags ordered by revision'''
449 if not self._tagscache.tagslist:
449 if not self._tagscache.tagslist:
450 l = []
450 l = []
451 for t, n in self.tags().iteritems():
451 for t, n in self.tags().iteritems():
452 r = self.changelog.rev(n)
452 r = self.changelog.rev(n)
453 l.append((r, t, n))
453 l.append((r, t, n))
454 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
454 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
455
455
456 return self._tagscache.tagslist
456 return self._tagscache.tagslist
457
457
458 def nodetags(self, node):
458 def nodetags(self, node):
459 '''return the tags associated with a node'''
459 '''return the tags associated with a node'''
460 if not self._tagscache.nodetagscache:
460 if not self._tagscache.nodetagscache:
461 nodetagscache = {}
461 nodetagscache = {}
462 for t, n in self.tags().iteritems():
462 for t, n in self.tags().iteritems():
463 nodetagscache.setdefault(n, []).append(t)
463 nodetagscache.setdefault(n, []).append(t)
464 for tags in nodetagscache.itervalues():
464 for tags in nodetagscache.itervalues():
465 tags.sort()
465 tags.sort()
466 self._tagscache.nodetagscache = nodetagscache
466 self._tagscache.nodetagscache = nodetagscache
467 return self._tagscache.nodetagscache.get(node, [])
467 return self._tagscache.nodetagscache.get(node, [])
468
468
469 def nodebookmarks(self, node):
469 def nodebookmarks(self, node):
470 marks = []
470 marks = []
471 for bookmark, n in self._bookmarks.iteritems():
471 for bookmark, n in self._bookmarks.iteritems():
472 if n == node:
472 if n == node:
473 marks.append(bookmark)
473 marks.append(bookmark)
474 return sorted(marks)
474 return sorted(marks)
475
475
    def _branchtags(self, partial, lrev):
        # TODO: rename this function?
        # Bring the partial branch-head map (valid up to rev *lrev*) up to
        # date with the current tip, persisting the refreshed cache.
        tiprev = len(self) - 1
        if lrev != tiprev:
            ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
            self._updatebranchcache(partial, ctxgen)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
485
485
    def updatebranchcache(self):
        """Ensure self._branchcache reflects the current tip, reading the
        on-disk cache or incrementally extending the in-memory one."""
        tip = self.changelog.tip()
        if self._branchcache is not None and self._branchcachetip == tip:
            # already current
            return

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # no usable in-memory state (e.g. after a strip): reload from disk
            partial, last, lrev = self._readbranchcache()
        else:
            # extend the existing cache from the old tip forward
            lrev = self.changelog.rev(oldtip)
            partial = self._branchcache

        self._branchtags(partial, lrev)
        # this private cache holds all heads (not just tips)
        self._branchcache = partial
502
502
    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]}'''
        # refresh lazily, then hand out the shared cache object
        self.updatebranchcache()
        return self._branchcache
507
507
508 def branchtags(self):
508 def branchtags(self):
509 '''return a dict where branch names map to the tipmost head of
509 '''return a dict where branch names map to the tipmost head of
510 the branch, open heads come before closed'''
510 the branch, open heads come before closed'''
511 bt = {}
511 bt = {}
512 for bn, heads in self.branchmap().iteritems():
512 for bn, heads in self.branchmap().iteritems():
513 tip = heads[-1]
513 tip = heads[-1]
514 for h in reversed(heads):
514 for h in reversed(heads):
515 if 'close' not in self.changelog.read(h)[5]:
515 if 'close' not in self.changelog.read(h)[5]:
516 tip = h
516 tip = h
517 break
517 break
518 bt[bn] = tip
518 bt[bn] = tip
519 return bt
519 return bt
520
520
    def _readbranchcache(self):
        """Read the branch-head cache from .hg/cache/branchheads.

        Returns (partial, last, lrev) where partial maps branch name ->
        list of head nodes and last/lrev identify the tip the cache was
        valid for.  On any problem (missing file, stale tip, parse error)
        an empty cache ({}, nullid, nullrev) is returned instead.
        """
        partial = {}
        try:
            f = self.opener("cache/branchheads")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            # no usable cache file - start from scratch
            return {}, nullid, nullrev

        try:
            # first line: "<tip hex> <tip rev>"
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            # remaining lines: "<head hex> <branch name>"
            for l in lines:
                if not l:
                    continue
                node, label = l.split(" ", 1)
                label = encoding.tolocal(label.strip())
                partial.setdefault(label, []).append(bin(node))
        except KeyboardInterrupt:
            raise
        except Exception, inst:
            # corrupt or stale cache: fall back to an empty one
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
549
549
550 def _writebranchcache(self, branches, tip, tiprev):
550 def _writebranchcache(self, branches, tip, tiprev):
551 try:
551 try:
552 f = self.opener("cache/branchheads", "w", atomictemp=True)
552 f = self.opener("cache/branchheads", "w", atomictemp=True)
553 f.write("%s %s\n" % (hex(tip), tiprev))
553 f.write("%s %s\n" % (hex(tip), tiprev))
554 for label, nodes in branches.iteritems():
554 for label, nodes in branches.iteritems():
555 for node in nodes:
555 for node in nodes:
556 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
556 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
557 f.close()
557 f.close()
558 except (IOError, OSError):
558 except (IOError, OSError):
559 pass
559 pass
560
560
    def _updatebranchcache(self, partial, ctxgen):
        """Fold the changesets from ctxgen into the branch-head map.

        partial maps branch name -> list of head nodes and is updated in
        place; older heads that become reachable from newer ones are
        dropped, since they are no longer heads.
        """
        # collect new branch entries
        newbranches = {}
        for c in ctxgen:
            newbranches.setdefault(c.branch(), []).append(c.node())
        # if older branchheads are reachable from new ones, they aren't
        # really branchheads. Note checking parents is insufficient:
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newnodes in newbranches.iteritems():
            bheads = partial.setdefault(branch, [])
            bheads.extend(newnodes)
            if len(bheads) <= 1:
                # a single candidate is trivially a head
                continue
            bheads = sorted(bheads, key=lambda x: self[x].rev())
            # starting from tip means fewer passes over reachable
            while newnodes:
                latest = newnodes.pop()
                if latest not in bheads:
                    continue
                # only walk back as far as the oldest candidate head
                minbhrev = self[bheads[0]].node()
                reachable = self.changelog.reachable(latest, minbhrev)
                reachable.remove(latest)
                if reachable:
                    # anything reachable from 'latest' is not a head
                    bheads = [b for b in bheads if b not in reachable]
            partial[branch] = bheads
586
586
    def lookup(self, key):
        """Resolve key to a binary changelog node.

        Resolution order: integer revision, the special names '.', 'null'
        and 'tip', exact node/hex match, bookmark, tag, branch tip, then
        unique hex prefix.  Raises RepoLookupError when nothing matches.
        """
        if isinstance(key, int):
            return self.changelog.node(key)
        elif key == '.':
            return self.dirstate.p1()
        elif key == 'null':
            return nullid
        elif key == 'tip':
            return self.changelog.tip()
        n = self.changelog._match(key)
        if n:
            return n
        if key in self._bookmarks:
            return self._bookmarks[key]
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n

        # can't find key, check if it might have come from damaged dirstate
        if key in self.dirstate.parents():
            raise error.Abort(_("working directory has unknown parent '%s'!")
                              % short(key))
        try:
            # show binary nodes as hex in the error message
            if len(key) == 20:
                key = hex(key)
        except TypeError:
            pass
        raise error.RepoLookupError(_("unknown revision '%s'") % key)
619
619
620 def lookupbranch(self, key, remote=None):
620 def lookupbranch(self, key, remote=None):
621 repo = remote or self
621 repo = remote or self
622 if key in repo.branchmap():
622 if key in repo.branchmap():
623 return key
623 return key
624
624
625 repo = (remote and remote.local()) and remote or self
625 repo = (remote and remote.local()) and remote or self
626 return repo[key].branch()
626 return repo[key].branch()
627
627
628 def known(self, nodes):
628 def known(self, nodes):
629 nm = self.changelog.nodemap
629 nm = self.changelog.nodemap
630 result = []
630 result = []
631 for n in nodes:
631 for n in nodes:
632 r = nm.get(n)
632 r = nm.get(n)
633 resp = not (r is None or self._phaserev[r] >= phases.secret)
633 resp = not (r is None or self._phaserev[r] >= phases.secret)
634 result.append(resp)
634 result.append(resp)
635 return result
635 return result
636
636
    def local(self):
        # this repository is local; callers use the return value both as
        # a boolean test and as the local repo object itself
        return self
639
639
    def cancopy(self):
        # refuse direct store copies when secret changesets exist: a raw
        # copy of the store would expose them
        return (repo.repository.cancopy(self)
                and not self._phaseroots[phases.secret])
643
643
    def join(self, f):
        # path of f inside the repository metadata directory (self.path)
        return os.path.join(self.path, f)
646
646
    def wjoin(self, f):
        # path of f inside the working directory (self.root)
        return os.path.join(self.root, f)
649
649
650 def file(self, f):
650 def file(self, f):
651 if f[0] == '/':
651 if f[0] == '/':
652 f = f[1:]
652 f = f[1:]
653 return filelog.filelog(self.sopener, f)
653 return filelog.filelog(self.sopener, f)
654
654
    def changectx(self, changeid):
        # thin convenience wrapper around repo[changeid]
        return self[changeid]
657
657
    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        # changeid=None presumably selects the working directory context
        # (see __getitem__) - confirm before relying on it
        return self[changeid].parents()
661
661
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
666
666
    def getcwd(self):
        # delegate to the dirstate, which tracks the working directory
        return self.dirstate.getcwd()
669
669
    def pathto(self, f, cwd=None):
        # delegate relative-path computation to the dirstate
        return self.dirstate.pathto(f, cwd)
672
672
    def wfile(self, f, mode='r'):
        # open f relative to the working directory
        return self.wopener(f, mode)
675
675
    def _link(self, f):
        # True when f is a symlink in the working directory
        return os.path.islink(self.wjoin(f))
678
678
679 def _loadfilter(self, filter):
679 def _loadfilter(self, filter):
680 if filter not in self.filterpats:
680 if filter not in self.filterpats:
681 l = []
681 l = []
682 for pat, cmd in self.ui.configitems(filter):
682 for pat, cmd in self.ui.configitems(filter):
683 if cmd == '!':
683 if cmd == '!':
684 continue
684 continue
685 mf = matchmod.match(self.root, '', [pat])
685 mf = matchmod.match(self.root, '', [pat])
686 fn = None
686 fn = None
687 params = cmd
687 params = cmd
688 for name, filterfn in self._datafilters.iteritems():
688 for name, filterfn in self._datafilters.iteritems():
689 if cmd.startswith(name):
689 if cmd.startswith(name):
690 fn = filterfn
690 fn = filterfn
691 params = cmd[len(name):].lstrip()
691 params = cmd[len(name):].lstrip()
692 break
692 break
693 if not fn:
693 if not fn:
694 fn = lambda s, c, **kwargs: util.filter(s, c)
694 fn = lambda s, c, **kwargs: util.filter(s, c)
695 # Wrap old filters not supporting keyword arguments
695 # Wrap old filters not supporting keyword arguments
696 if not inspect.getargspec(fn)[2]:
696 if not inspect.getargspec(fn)[2]:
697 oldfn = fn
697 oldfn = fn
698 fn = lambda s, c, **kwargs: oldfn(s, c)
698 fn = lambda s, c, **kwargs: oldfn(s, c)
699 l.append((mf, fn, params))
699 l.append((mf, fn, params))
700 self.filterpats[filter] = l
700 self.filterpats[filter] = l
701 return self.filterpats[filter]
701 return self.filterpats[filter]
702
702
703 def _filter(self, filterpats, filename, data):
703 def _filter(self, filterpats, filename, data):
704 for mf, fn, cmd in filterpats:
704 for mf, fn, cmd in filterpats:
705 if mf(filename):
705 if mf(filename):
706 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
706 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
707 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
707 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
708 break
708 break
709
709
710 return data
710 return data
711
711
    @propertycache
    def _encodefilterpats(self):
        # compiled [encode] filter patterns, loaded lazily and cached
        return self._loadfilter('encode')
715
715
    @propertycache
    def _decodefilterpats(self):
        # compiled [decode] filter patterns, loaded lazily and cached
        return self._loadfilter('decode')
719
719
    def adddatafilter(self, name, filter):
        # register a named data filter; commands in [encode]/[decode]
        # sections are matched against these names by prefix (_loadfilter)
        self._datafilters[name] = filter
722
722
723 def wread(self, filename):
723 def wread(self, filename):
724 if self._link(filename):
724 if self._link(filename):
725 data = os.readlink(self.wjoin(filename))
725 data = os.readlink(self.wjoin(filename))
726 else:
726 else:
727 data = self.wopener.read(filename)
727 data = self.wopener.read(filename)
728 return self._filter(self._encodefilterpats, filename, data)
728 return self._filter(self._encodefilterpats, filename, data)
729
729
730 def wwrite(self, filename, data, flags):
730 def wwrite(self, filename, data, flags):
731 data = self._filter(self._decodefilterpats, filename, data)
731 data = self._filter(self._decodefilterpats, filename, data)
732 if 'l' in flags:
732 if 'l' in flags:
733 self.wopener.symlink(data, filename)
733 self.wopener.symlink(data, filename)
734 else:
734 else:
735 self.wopener.write(filename, data)
735 self.wopener.write(filename, data)
736 if 'x' in flags:
736 if 'x' in flags:
737 util.setflags(self.wjoin(filename), False, True)
737 util.setflags(self.wjoin(filename), False, True)
738
738
    def wwritedata(self, filename, data):
        # return data as it would appear in the working directory
        # ([decode] filters applied) without writing anything
        return self._filter(self._decodefilterpats, filename, data)
741
741
    def transaction(self, desc):
        """Open (or nest into) a store transaction described by desc.

        Returns a transaction object.  Aborts when an abandoned journal
        from a previous crash is still present.
        """
        # nest inside any transaction already running
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(
                _("abandoned transaction found - run hg recover"))

        journalfiles = self._writejournal(desc)
        # on close, journal files are renamed to their undo counterparts
        renames = [(x, undoname(x)) for x in journalfiles]

        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        self._transref = weakref.ref(tr)
        return tr
761
761
    def _writejournal(self, desc):
        """Snapshot the state needed to undo a transaction.

        Writes journal copies of the dirstate, branch, description,
        bookmarks and phase roots; returns the tuple of all journal file
        names (used by transaction() to build the undo renames).
        """
        # save dirstate for rollback
        try:
            ds = self.opener.read("dirstate")
        except IOError:
            # no dirstate yet (fresh repo) - journal an empty one
            ds = ""
        self.opener.write("journal.dirstate", ds)
        self.opener.write("journal.branch",
                          encoding.fromlocal(self.dirstate.branch()))
        self.opener.write("journal.desc",
                          "%d\n%s\n" % (len(self), desc))

        bkname = self.join('bookmarks')
        if os.path.exists(bkname):
            util.copyfile(bkname, self.join('journal.bookmarks'))
        else:
            self.opener.write('journal.bookmarks', '')
        phasesname = self.sjoin('phaseroots')
        if os.path.exists(phasesname):
            util.copyfile(phasesname, self.sjoin('journal.phaseroots'))
        else:
            self.sopener.write('journal.phaseroots', '')

        return (self.sjoin('journal'), self.join('journal.dirstate'),
                self.join('journal.branch'), self.join('journal.desc'),
                self.join('journal.bookmarks'),
                self.sjoin('journal.phaseroots'))
789
789
    def recover(self):
        """Roll back an interrupted transaction, if any.

        Returns True when a journal was found and rolled back, False
        otherwise.
        """
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"),
                                     self.ui.warn)
                # drop caches that may reflect the rolled-back state
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
804
804
    def rollback(self, dryrun=False, force=False):
        """Undo the most recent transaction (see _rollback).

        Returns 0 on success, 1 when no undo information exists.
        """
        wlock = lock = None
        try:
            # take both locks (working directory first, then store)
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                return self._rollback(dryrun, force)
            else:
                self.ui.warn(_("no rollback information available\n"))
                return 1
        finally:
            release(lock, wlock)
817
817
    def _rollback(self, dryrun, force):
        """Restore the repository to its pre-transaction state.

        Reads undo.desc for reporting, refuses to roll back a commit
        that is not the working directory parent unless force is set,
        then restores the store, bookmarks and phase roots, and - when
        a dirstate parent was removed - the dirstate and branch too.
        Returns 0.
        """
        ui = self.ui
        try:
            # undo.desc: "<old repo length>\n<operation>[\n<detail>]"
            args = self.opener.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            msg = _('rolling back unknown transaction\n')
            desc = None

        # rolling back a commit that is not checked out could silently
        # discard the user's work
        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
        if os.path.exists(self.join('undo.bookmarks')):
            util.rename(self.join('undo.bookmarks'),
                        self.join('bookmarks'))
        if os.path.exists(self.sjoin('undo.phaseroots')):
            util.rename(self.sjoin('undo.phaseroots'),
                        self.sjoin('phaseroots'))
        self.invalidate()

        # only restore the dirstate when the rollback removed one of the
        # working directory's parents from the changelog
        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            util.rename(self.join('undo.dirstate'), self.join('dirstate'))
            try:
                branch = self.opener.read('undo.branch')
                self.dirstate.setbranch(branch)
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
        self.destroyed()
        return 0
880
880
881 def invalidatecaches(self):
881 def invalidatecaches(self):
882 try:
882 try:
883 delattr(self, '_tagscache')
883 delattr(self, '_tagscache')
884 except AttributeError:
884 except AttributeError:
885 pass
885 pass
886
886
887 self._branchcache = None # in UTF-8
887 self._branchcache = None # in UTF-8
888 self._branchcachetip = None
888 self._branchcachetip = None
889
889
    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This is different to dirstate.invalidate() that it doesn't always
        rereads the dirstate. Use dirstate.invalidate() if you want to
        explicitly read the dirstate again (i.e. restoring it to a previous
        known good state).'''
        try:
            # drop the cached attribute; it is recreated lazily on access
            delattr(self, 'dirstate')
        except AttributeError:
            # dirstate was never loaded - nothing to drop
            pass
903
903
    def invalidate(self):
        """Drop every cached filecache-backed attribute (except the
        dirstate) plus the tag/branch caches, forcing re-reads."""
        for k in self._filecache:
            # dirstate is invalidated separately in invalidatedirstate()
            if k == 'dirstate':
                continue

            try:
                delattr(self, k)
            except AttributeError:
                # attribute not loaded yet - nothing to drop
                pass
        self.invalidatecaches()
915
915
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock file lockname.

        When wait is false a held lock raises LockHeld immediately;
        otherwise warn and retry with the configured ui.timeout.
        acquirefn (if any) runs once the lock is obtained; releasefn
        runs on release.  Returns the lock object.
        """
        try:
            # first attempt: no timeout, fail fast if held
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
930
930
931 def _afterlock(self, callback):
931 def _afterlock(self, callback):
932 """add a callback to the current repository lock.
932 """add a callback to the current repository lock.
933
933
934 The callback will be executed on lock release."""
934 The callback will be executed on lock release."""
935 l = self._lockref and self._lockref()
935 l = self._lockref and self._lockref()
936 if l:
936 if l:
937 l.postrelease.append(callback)
937 l.postrelease.append(callback)
938
938
    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.)'''
        # reuse the existing lock if this process already holds it
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            l.lock()
            return l

        def unlock():
            # flush pending store writes and dirty phase data on release
            self.store.write()
            if self._dirtyphases:
                phases.writeroots(self)
            # refresh filecache entries so our own writes are not seen
            # as stale (dirstate is handled by wlock's release hook)
            for k, ce in self._filecache.items():
                if k == 'dirstate':
                    continue
                ce.refresh()

        l = self._lock(self.sjoin("lock"), wait, unlock,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
961
961
    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        # reuse the existing wlock if this process already holds it
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            l.lock()
            return l

        def unlock():
            # write the dirstate back and refresh its filecache entry
            self.dirstate.write()
            ce = self._filecache.get('dirstate')
            if ce:
                ce.refresh()

        l = self._lock(self.join("wlock"), wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
982
982
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the filenode to record in the manifest for fctx's path,
        appending the path to changelist when content, copy metadata or
        flags changed.  Handles renames/copies by recording copy
        metadata, and collapses filelog parents when one is an ancestor
        of the other.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3 rev1 changes file foo
            # \ / rev2 renames foo to bar and changes it
            # \- 2 -/ rev3 should have bar with all changes and
            # should record that bar descends from
            # bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3 rev4 reverts the content change from rev2
            # \ / merging rev3 and rev4 should use bar@rev2
            # \- 2 --- 4 as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(" %s: searching for copy revision for %s\n" %
                              (fname, cfname))
                for ancestor in self[None].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestor = flog.ancestor(fparent1, fparent2)
            if fparentancestor == fparent1:
                fparent1, fparent2 = fparent2, nullid
            elif fparentancestor == fparent2:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1
1062
1062
1063 def commit(self, text="", user=None, date=None, match=None, force=False,
1063 def commit(self, text="", user=None, date=None, match=None, force=False,
1064 editor=False, extra={}):
1064 editor=False, extra={}):
1065 """Add a new revision to current repository.
1065 """Add a new revision to current repository.
1066
1066
1067 Revision information is gathered from the working directory,
1067 Revision information is gathered from the working directory,
1068 match can be used to filter the committed files. If editor is
1068 match can be used to filter the committed files. If editor is
1069 supplied, it is called to get a commit message.
1069 supplied, it is called to get a commit message.
1070 """
1070 """
1071
1071
1072 def fail(f, msg):
1072 def fail(f, msg):
1073 raise util.Abort('%s: %s' % (f, msg))
1073 raise util.Abort('%s: %s' % (f, msg))
1074
1074
1075 if not match:
1075 if not match:
1076 match = matchmod.always(self.root, '')
1076 match = matchmod.always(self.root, '')
1077
1077
1078 if not force:
1078 if not force:
1079 vdirs = []
1079 vdirs = []
1080 match.dir = vdirs.append
1080 match.dir = vdirs.append
1081 match.bad = fail
1081 match.bad = fail
1082
1082
1083 wlock = self.wlock()
1083 wlock = self.wlock()
1084 try:
1084 try:
1085 wctx = self[None]
1085 wctx = self[None]
1086 merge = len(wctx.parents()) > 1
1086 merge = len(wctx.parents()) > 1
1087
1087
1088 if (not force and merge and match and
1088 if (not force and merge and match and
1089 (match.files() or match.anypats())):
1089 (match.files() or match.anypats())):
1090 raise util.Abort(_('cannot partially commit a merge '
1090 raise util.Abort(_('cannot partially commit a merge '
1091 '(do not specify files or patterns)'))
1091 '(do not specify files or patterns)'))
1092
1092
1093 changes = self.status(match=match, clean=force)
1093 changes = self.status(match=match, clean=force)
1094 if force:
1094 if force:
1095 changes[0].extend(changes[6]) # mq may commit unchanged files
1095 changes[0].extend(changes[6]) # mq may commit unchanged files
1096
1096
1097 # check subrepos
1097 # check subrepos
1098 subs = []
1098 subs = []
1099 removedsubs = set()
1099 removedsubs = set()
1100 if '.hgsub' in wctx:
1100 if '.hgsub' in wctx:
1101 # only manage subrepos and .hgsubstate if .hgsub is present
1101 # only manage subrepos and .hgsubstate if .hgsub is present
1102 for p in wctx.parents():
1102 for p in wctx.parents():
1103 removedsubs.update(s for s in p.substate if match(s))
1103 removedsubs.update(s for s in p.substate if match(s))
1104 for s in wctx.substate:
1104 for s in wctx.substate:
1105 removedsubs.discard(s)
1105 removedsubs.discard(s)
1106 if match(s) and wctx.sub(s).dirty():
1106 if match(s) and wctx.sub(s).dirty():
1107 subs.append(s)
1107 subs.append(s)
1108 if (subs or removedsubs):
1108 if (subs or removedsubs):
1109 if (not match('.hgsub') and
1109 if (not match('.hgsub') and
1110 '.hgsub' in (wctx.modified() + wctx.added())):
1110 '.hgsub' in (wctx.modified() + wctx.added())):
1111 raise util.Abort(
1111 raise util.Abort(
1112 _("can't commit subrepos without .hgsub"))
1112 _("can't commit subrepos without .hgsub"))
1113 if '.hgsubstate' not in changes[0]:
1113 if '.hgsubstate' not in changes[0]:
1114 changes[0].insert(0, '.hgsubstate')
1114 changes[0].insert(0, '.hgsubstate')
1115 if '.hgsubstate' in changes[2]:
1115 if '.hgsubstate' in changes[2]:
1116 changes[2].remove('.hgsubstate')
1116 changes[2].remove('.hgsubstate')
1117 elif '.hgsub' in changes[2]:
1117 elif '.hgsub' in changes[2]:
1118 # clean up .hgsubstate when .hgsub is removed
1118 # clean up .hgsubstate when .hgsub is removed
1119 if ('.hgsubstate' in wctx and
1119 if ('.hgsubstate' in wctx and
1120 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1120 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1121 changes[2].insert(0, '.hgsubstate')
1121 changes[2].insert(0, '.hgsubstate')
1122
1122
1123 if subs and not self.ui.configbool('ui', 'commitsubrepos', False):
1123 if subs and not self.ui.configbool('ui', 'commitsubrepos', False):
1124 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
1124 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
1125 if changedsubs:
1125 if changedsubs:
1126 raise util.Abort(_("uncommitted changes in subrepo %s")
1126 raise util.Abort(_("uncommitted changes in subrepo %s")
1127 % changedsubs[0],
1127 % changedsubs[0],
1128 hint=_("use --subrepos for recursive commit"))
1128 hint=_("use --subrepos for recursive commit"))
1129
1129
1130 # make sure all explicit patterns are matched
1130 # make sure all explicit patterns are matched
1131 if not force and match.files():
1131 if not force and match.files():
1132 matched = set(changes[0] + changes[1] + changes[2])
1132 matched = set(changes[0] + changes[1] + changes[2])
1133
1133
1134 for f in match.files():
1134 for f in match.files():
1135 if f == '.' or f in matched or f in wctx.substate:
1135 if f == '.' or f in matched or f in wctx.substate:
1136 continue
1136 continue
1137 if f in changes[3]: # missing
1137 if f in changes[3]: # missing
1138 fail(f, _('file not found!'))
1138 fail(f, _('file not found!'))
1139 if f in vdirs: # visited directory
1139 if f in vdirs: # visited directory
1140 d = f + '/'
1140 d = f + '/'
1141 for mf in matched:
1141 for mf in matched:
1142 if mf.startswith(d):
1142 if mf.startswith(d):
1143 break
1143 break
1144 else:
1144 else:
1145 fail(f, _("no match under directory!"))
1145 fail(f, _("no match under directory!"))
1146 elif f not in self.dirstate:
1146 elif f not in self.dirstate:
1147 fail(f, _("file not tracked!"))
1147 fail(f, _("file not tracked!"))
1148
1148
1149 if (not force and not extra.get("close") and not merge
1149 if (not force and not extra.get("close") and not merge
1150 and not (changes[0] or changes[1] or changes[2])
1150 and not (changes[0] or changes[1] or changes[2])
1151 and wctx.branch() == wctx.p1().branch()):
1151 and wctx.branch() == wctx.p1().branch()):
1152 return None
1152 return None
1153
1153
1154 ms = mergemod.mergestate(self)
1154 ms = mergemod.mergestate(self)
1155 for f in changes[0]:
1155 for f in changes[0]:
1156 if f in ms and ms[f] == 'u':
1156 if f in ms and ms[f] == 'u':
1157 raise util.Abort(_("unresolved merge conflicts "
1157 raise util.Abort(_("unresolved merge conflicts "
1158 "(see hg help resolve)"))
1158 "(see hg help resolve)"))
1159
1159
1160 cctx = context.workingctx(self, text, user, date, extra, changes)
1160 cctx = context.workingctx(self, text, user, date, extra, changes)
1161 if editor:
1161 if editor:
1162 cctx._text = editor(self, cctx, subs)
1162 cctx._text = editor(self, cctx, subs)
1163 edited = (text != cctx._text)
1163 edited = (text != cctx._text)
1164
1164
1165 # commit subs
1165 # commit subs
1166 if subs or removedsubs:
1166 if subs or removedsubs:
1167 state = wctx.substate.copy()
1167 state = wctx.substate.copy()
1168 for s in sorted(subs):
1168 for s in sorted(subs):
1169 sub = wctx.sub(s)
1169 sub = wctx.sub(s)
1170 self.ui.status(_('committing subrepository %s\n') %
1170 self.ui.status(_('committing subrepository %s\n') %
1171 subrepo.subrelpath(sub))
1171 subrepo.subrelpath(sub))
1172 sr = sub.commit(cctx._text, user, date)
1172 sr = sub.commit(cctx._text, user, date)
1173 state[s] = (state[s][0], sr)
1173 state[s] = (state[s][0], sr)
1174 subrepo.writestate(self, state)
1174 subrepo.writestate(self, state)
1175
1175
1176 # Save commit message in case this transaction gets rolled back
1176 # Save commit message in case this transaction gets rolled back
1177 # (e.g. by a pretxncommit hook). Leave the content alone on
1177 # (e.g. by a pretxncommit hook). Leave the content alone on
1178 # the assumption that the user will use the same editor again.
1178 # the assumption that the user will use the same editor again.
1179 msgfn = self.savecommitmessage(cctx._text)
1179 msgfn = self.savecommitmessage(cctx._text)
1180
1180
1181 p1, p2 = self.dirstate.parents()
1181 p1, p2 = self.dirstate.parents()
1182 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1182 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1183 try:
1183 try:
1184 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1184 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1185 ret = self.commitctx(cctx, True)
1185 ret = self.commitctx(cctx, True)
1186 except:
1186 except:
1187 if edited:
1187 if edited:
1188 self.ui.write(
1188 self.ui.write(
1189 _('note: commit message saved in %s\n') % msgfn)
1189 _('note: commit message saved in %s\n') % msgfn)
1190 raise
1190 raise
1191
1191
1192 # update bookmarks, dirstate and mergestate
1192 # update bookmarks, dirstate and mergestate
1193 bookmarks.update(self, p1, ret)
1193 bookmarks.update(self, p1, ret)
1194 for f in changes[0] + changes[1]:
1194 for f in changes[0] + changes[1]:
1195 self.dirstate.normal(f)
1195 self.dirstate.normal(f)
1196 for f in changes[2]:
1196 for f in changes[2]:
1197 self.dirstate.drop(f)
1197 self.dirstate.drop(f)
1198 self.dirstate.setparents(ret)
1198 self.dirstate.setparents(ret)
1199 ms.reset()
1199 ms.reset()
1200 finally:
1200 finally:
1201 wlock.release()
1201 wlock.release()
1202
1202
1203 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1203 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1204 return ret
1204 return ret
1205
1205
1206 def commitctx(self, ctx, error=False):
1206 def commitctx(self, ctx, error=False):
1207 """Add a new revision to current repository.
1207 """Add a new revision to current repository.
1208 Revision information is passed via the context argument.
1208 Revision information is passed via the context argument.
1209 """
1209 """
1210
1210
1211 tr = lock = None
1211 tr = lock = None
1212 removed = list(ctx.removed())
1212 removed = list(ctx.removed())
1213 p1, p2 = ctx.p1(), ctx.p2()
1213 p1, p2 = ctx.p1(), ctx.p2()
1214 user = ctx.user()
1214 user = ctx.user()
1215
1215
1216 lock = self.lock()
1216 lock = self.lock()
1217 try:
1217 try:
1218 tr = self.transaction("commit")
1218 tr = self.transaction("commit")
1219 trp = weakref.proxy(tr)
1219 trp = weakref.proxy(tr)
1220
1220
1221 if ctx.files():
1221 if ctx.files():
1222 m1 = p1.manifest().copy()
1222 m1 = p1.manifest().copy()
1223 m2 = p2.manifest()
1223 m2 = p2.manifest()
1224
1224
1225 # check in files
1225 # check in files
1226 new = {}
1226 new = {}
1227 changed = []
1227 changed = []
1228 linkrev = len(self)
1228 linkrev = len(self)
1229 for f in sorted(ctx.modified() + ctx.added()):
1229 for f in sorted(ctx.modified() + ctx.added()):
1230 self.ui.note(f + "\n")
1230 self.ui.note(f + "\n")
1231 try:
1231 try:
1232 fctx = ctx[f]
1232 fctx = ctx[f]
1233 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1233 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1234 changed)
1234 changed)
1235 m1.set(f, fctx.flags())
1235 m1.set(f, fctx.flags())
1236 except OSError, inst:
1236 except OSError, inst:
1237 self.ui.warn(_("trouble committing %s!\n") % f)
1237 self.ui.warn(_("trouble committing %s!\n") % f)
1238 raise
1238 raise
1239 except IOError, inst:
1239 except IOError, inst:
1240 errcode = getattr(inst, 'errno', errno.ENOENT)
1240 errcode = getattr(inst, 'errno', errno.ENOENT)
1241 if error or errcode and errcode != errno.ENOENT:
1241 if error or errcode and errcode != errno.ENOENT:
1242 self.ui.warn(_("trouble committing %s!\n") % f)
1242 self.ui.warn(_("trouble committing %s!\n") % f)
1243 raise
1243 raise
1244 else:
1244 else:
1245 removed.append(f)
1245 removed.append(f)
1246
1246
1247 # update manifest
1247 # update manifest
1248 m1.update(new)
1248 m1.update(new)
1249 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1249 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1250 drop = [f for f in removed if f in m1]
1250 drop = [f for f in removed if f in m1]
1251 for f in drop:
1251 for f in drop:
1252 del m1[f]
1252 del m1[f]
1253 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1253 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1254 p2.manifestnode(), (new, drop))
1254 p2.manifestnode(), (new, drop))
1255 files = changed + removed
1255 files = changed + removed
1256 else:
1256 else:
1257 mn = p1.manifestnode()
1257 mn = p1.manifestnode()
1258 files = []
1258 files = []
1259
1259
1260 # update changelog
1260 # update changelog
1261 self.changelog.delayupdate()
1261 self.changelog.delayupdate()
1262 n = self.changelog.add(mn, files, ctx.description(),
1262 n = self.changelog.add(mn, files, ctx.description(),
1263 trp, p1.node(), p2.node(),
1263 trp, p1.node(), p2.node(),
1264 user, ctx.date(), ctx.extra().copy())
1264 user, ctx.date(), ctx.extra().copy())
1265 p = lambda: self.changelog.writepending() and self.root or ""
1265 p = lambda: self.changelog.writepending() and self.root or ""
1266 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1266 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1267 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1267 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1268 parent2=xp2, pending=p)
1268 parent2=xp2, pending=p)
1269 self.changelog.finalize(trp)
1269 self.changelog.finalize(trp)
1270 # set the new commit is proper phase
1270 # set the new commit is proper phase
1271 targetphase = self.ui.configint('phases', 'new-commit',
1271 targetphase = self.ui.configint('phases', 'new-commit',
1272 phases.draft)
1272 phases.draft)
1273 if targetphase:
1273 if targetphase:
1274 # retract boundary do not alter parent changeset.
1274 # retract boundary do not alter parent changeset.
1275 # if a parent have higher the resulting phase will
1275 # if a parent have higher the resulting phase will
1276 # be compliant anyway
1276 # be compliant anyway
1277 #
1277 #
1278 # if minimal phase was 0 we don't need to retract anything
1278 # if minimal phase was 0 we don't need to retract anything
1279 phases.retractboundary(self, targetphase, [n])
1279 phases.retractboundary(self, targetphase, [n])
1280 tr.close()
1280 tr.close()
1281 self.updatebranchcache()
1281 self.updatebranchcache()
1282 return n
1282 return n
1283 finally:
1283 finally:
1284 if tr:
1284 if tr:
1285 tr.release()
1285 tr.release()
1286 lock.release()
1286 lock.release()
1287
1287
1288 def destroyed(self):
1288 def destroyed(self):
1289 '''Inform the repository that nodes have been destroyed.
1289 '''Inform the repository that nodes have been destroyed.
1290 Intended for use by strip and rollback, so there's a common
1290 Intended for use by strip and rollback, so there's a common
1291 place for anything that has to be done after destroying history.'''
1291 place for anything that has to be done after destroying history.'''
1292 # XXX it might be nice if we could take the list of destroyed
1292 # XXX it might be nice if we could take the list of destroyed
1293 # nodes, but I don't see an easy way for rollback() to do that
1293 # nodes, but I don't see an easy way for rollback() to do that
1294
1294
1295 # Ensure the persistent tag cache is updated. Doing it now
1295 # Ensure the persistent tag cache is updated. Doing it now
1296 # means that the tag cache only has to worry about destroyed
1296 # means that the tag cache only has to worry about destroyed
1297 # heads immediately after a strip/rollback. That in turn
1297 # heads immediately after a strip/rollback. That in turn
1298 # guarantees that "cachetip == currenttip" (comparing both rev
1298 # guarantees that "cachetip == currenttip" (comparing both rev
1299 # and node) always means no nodes have been added or destroyed.
1299 # and node) always means no nodes have been added or destroyed.
1300
1300
1301 # XXX this is suboptimal when qrefresh'ing: we strip the current
1301 # XXX this is suboptimal when qrefresh'ing: we strip the current
1302 # head, refresh the tag cache, then immediately add a new head.
1302 # head, refresh the tag cache, then immediately add a new head.
1303 # But I think doing it this way is necessary for the "instant
1303 # But I think doing it this way is necessary for the "instant
1304 # tag cache retrieval" case to work.
1304 # tag cache retrieval" case to work.
1305 self.invalidatecaches()
1305 self.invalidatecaches()
1306
1306
1307 def walk(self, match, node=None):
1307 def walk(self, match, node=None):
1308 '''
1308 '''
1309 walk recursively through the directory tree or a given
1309 walk recursively through the directory tree or a given
1310 changeset, finding all files matched by the match
1310 changeset, finding all files matched by the match
1311 function
1311 function
1312 '''
1312 '''
1313 return self[node].walk(match)
1313 return self[node].walk(match)
1314
1314
1315 def status(self, node1='.', node2=None, match=None,
1315 def status(self, node1='.', node2=None, match=None,
1316 ignored=False, clean=False, unknown=False,
1316 ignored=False, clean=False, unknown=False,
1317 listsubrepos=False):
1317 listsubrepos=False):
1318 """return status of files between two nodes or node and working directory
1318 """return status of files between two nodes or node and working directory
1319
1319
1320 If node1 is None, use the first dirstate parent instead.
1320 If node1 is None, use the first dirstate parent instead.
1321 If node2 is None, compare node1 with working directory.
1321 If node2 is None, compare node1 with working directory.
1322 """
1322 """
1323
1323
1324 def mfmatches(ctx):
1324 def mfmatches(ctx):
1325 mf = ctx.manifest().copy()
1325 mf = ctx.manifest().copy()
1326 for fn in mf.keys():
1326 for fn in mf.keys():
1327 if not match(fn):
1327 if not match(fn):
1328 del mf[fn]
1328 del mf[fn]
1329 return mf
1329 return mf
1330
1330
1331 if isinstance(node1, context.changectx):
1331 if isinstance(node1, context.changectx):
1332 ctx1 = node1
1332 ctx1 = node1
1333 else:
1333 else:
1334 ctx1 = self[node1]
1334 ctx1 = self[node1]
1335 if isinstance(node2, context.changectx):
1335 if isinstance(node2, context.changectx):
1336 ctx2 = node2
1336 ctx2 = node2
1337 else:
1337 else:
1338 ctx2 = self[node2]
1338 ctx2 = self[node2]
1339
1339
1340 working = ctx2.rev() is None
1340 working = ctx2.rev() is None
1341 parentworking = working and ctx1 == self['.']
1341 parentworking = working and ctx1 == self['.']
1342 match = match or matchmod.always(self.root, self.getcwd())
1342 match = match or matchmod.always(self.root, self.getcwd())
1343 listignored, listclean, listunknown = ignored, clean, unknown
1343 listignored, listclean, listunknown = ignored, clean, unknown
1344
1344
1345 # load earliest manifest first for caching reasons
1345 # load earliest manifest first for caching reasons
1346 if not working and ctx2.rev() < ctx1.rev():
1346 if not working and ctx2.rev() < ctx1.rev():
1347 ctx2.manifest()
1347 ctx2.manifest()
1348
1348
1349 if not parentworking:
1349 if not parentworking:
1350 def bad(f, msg):
1350 def bad(f, msg):
1351 if f not in ctx1:
1351 if f not in ctx1:
1352 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1352 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1353 match.bad = bad
1353 match.bad = bad
1354
1354
1355 if working: # we need to scan the working dir
1355 if working: # we need to scan the working dir
1356 subrepos = []
1356 subrepos = []
1357 if '.hgsub' in self.dirstate:
1357 if '.hgsub' in self.dirstate:
1358 subrepos = ctx2.substate.keys()
1358 subrepos = ctx2.substate.keys()
1359 s = self.dirstate.status(match, subrepos, listignored,
1359 s = self.dirstate.status(match, subrepos, listignored,
1360 listclean, listunknown)
1360 listclean, listunknown)
1361 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1361 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1362
1362
1363 # check for any possibly clean files
1363 # check for any possibly clean files
1364 if parentworking and cmp:
1364 if parentworking and cmp:
1365 fixup = []
1365 fixup = []
1366 # do a full compare of any files that might have changed
1366 # do a full compare of any files that might have changed
1367 for f in sorted(cmp):
1367 for f in sorted(cmp):
1368 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1368 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1369 or ctx1[f].cmp(ctx2[f])):
1369 or ctx1[f].cmp(ctx2[f])):
1370 modified.append(f)
1370 modified.append(f)
1371 else:
1371 else:
1372 fixup.append(f)
1372 fixup.append(f)
1373
1373
1374 # update dirstate for files that are actually clean
1374 # update dirstate for files that are actually clean
1375 if fixup:
1375 if fixup:
1376 if listclean:
1376 if listclean:
1377 clean += fixup
1377 clean += fixup
1378
1378
1379 try:
1379 try:
1380 # updating the dirstate is optional
1380 # updating the dirstate is optional
1381 # so we don't wait on the lock
1381 # so we don't wait on the lock
1382 wlock = self.wlock(False)
1382 wlock = self.wlock(False)
1383 try:
1383 try:
1384 for f in fixup:
1384 for f in fixup:
1385 self.dirstate.normal(f)
1385 self.dirstate.normal(f)
1386 finally:
1386 finally:
1387 wlock.release()
1387 wlock.release()
1388 except error.LockError:
1388 except error.LockError:
1389 pass
1389 pass
1390
1390
1391 if not parentworking:
1391 if not parentworking:
1392 mf1 = mfmatches(ctx1)
1392 mf1 = mfmatches(ctx1)
1393 if working:
1393 if working:
1394 # we are comparing working dir against non-parent
1394 # we are comparing working dir against non-parent
1395 # generate a pseudo-manifest for the working dir
1395 # generate a pseudo-manifest for the working dir
1396 mf2 = mfmatches(self['.'])
1396 mf2 = mfmatches(self['.'])
1397 for f in cmp + modified + added:
1397 for f in cmp + modified + added:
1398 mf2[f] = None
1398 mf2[f] = None
1399 mf2.set(f, ctx2.flags(f))
1399 mf2.set(f, ctx2.flags(f))
1400 for f in removed:
1400 for f in removed:
1401 if f in mf2:
1401 if f in mf2:
1402 del mf2[f]
1402 del mf2[f]
1403 else:
1403 else:
1404 # we are comparing two revisions
1404 # we are comparing two revisions
1405 deleted, unknown, ignored = [], [], []
1405 deleted, unknown, ignored = [], [], []
1406 mf2 = mfmatches(ctx2)
1406 mf2 = mfmatches(ctx2)
1407
1407
1408 modified, added, clean = [], [], []
1408 modified, added, clean = [], [], []
1409 for fn in mf2:
1409 for fn in mf2:
1410 if fn in mf1:
1410 if fn in mf1:
1411 if (fn not in deleted and
1411 if (fn not in deleted and
1412 (mf1.flags(fn) != mf2.flags(fn) or
1412 (mf1.flags(fn) != mf2.flags(fn) or
1413 (mf1[fn] != mf2[fn] and
1413 (mf1[fn] != mf2[fn] and
1414 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1414 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1415 modified.append(fn)
1415 modified.append(fn)
1416 elif listclean:
1416 elif listclean:
1417 clean.append(fn)
1417 clean.append(fn)
1418 del mf1[fn]
1418 del mf1[fn]
1419 elif fn not in deleted:
1419 elif fn not in deleted:
1420 added.append(fn)
1420 added.append(fn)
1421 removed = mf1.keys()
1421 removed = mf1.keys()
1422
1422
1423 if working and modified and not self.dirstate._checklink:
1423 if working and modified and not self.dirstate._checklink:
1424 # Symlink placeholders may get non-symlink-like contents
1424 # Symlink placeholders may get non-symlink-like contents
1425 # via user error or dereferencing by NFS or Samba servers,
1425 # via user error or dereferencing by NFS or Samba servers,
1426 # so we filter out any placeholders that don't look like a
1426 # so we filter out any placeholders that don't look like a
1427 # symlink
1427 # symlink
1428 sane = []
1428 sane = []
1429 for f in modified:
1429 for f in modified:
1430 if ctx2.flags(f) == 'l':
1430 if ctx2.flags(f) == 'l':
1431 d = ctx2[f].data()
1431 d = ctx2[f].data()
1432 if len(d) >= 1024 or '\n' in d or util.binary(d):
1432 if len(d) >= 1024 or '\n' in d or util.binary(d):
1433 self.ui.debug('ignoring suspect symlink placeholder'
1433 self.ui.debug('ignoring suspect symlink placeholder'
1434 ' "%s"\n' % f)
1434 ' "%s"\n' % f)
1435 continue
1435 continue
1436 sane.append(f)
1436 sane.append(f)
1437 modified = sane
1437 modified = sane
1438
1438
1439 r = modified, added, removed, deleted, unknown, ignored, clean
1439 r = modified, added, removed, deleted, unknown, ignored, clean
1440
1440
1441 if listsubrepos:
1441 if listsubrepos:
1442 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1442 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1443 if working:
1443 if working:
1444 rev2 = None
1444 rev2 = None
1445 else:
1445 else:
1446 rev2 = ctx2.substate[subpath][1]
1446 rev2 = ctx2.substate[subpath][1]
1447 try:
1447 try:
1448 submatch = matchmod.narrowmatcher(subpath, match)
1448 submatch = matchmod.narrowmatcher(subpath, match)
1449 s = sub.status(rev2, match=submatch, ignored=listignored,
1449 s = sub.status(rev2, match=submatch, ignored=listignored,
1450 clean=listclean, unknown=listunknown,
1450 clean=listclean, unknown=listunknown,
1451 listsubrepos=True)
1451 listsubrepos=True)
1452 for rfiles, sfiles in zip(r, s):
1452 for rfiles, sfiles in zip(r, s):
1453 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1453 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1454 except error.LookupError:
1454 except error.LookupError:
1455 self.ui.status(_("skipping missing subrepository: %s\n")
1455 self.ui.status(_("skipping missing subrepository: %s\n")
1456 % subpath)
1456 % subpath)
1457
1457
1458 for l in r:
1458 for l in r:
1459 l.sort()
1459 l.sort()
1460 return r
1460 return r
1461
1461
1462 def heads(self, start=None):
1462 def heads(self, start=None):
1463 heads = self.changelog.heads(start)
1463 heads = self.changelog.heads(start)
1464 # sort the output in rev descending order
1464 # sort the output in rev descending order
1465 return sorted(heads, key=self.changelog.rev, reverse=True)
1465 return sorted(heads, key=self.changelog.rev, reverse=True)
1466
1466
1467 def branchheads(self, branch=None, start=None, closed=False):
1467 def branchheads(self, branch=None, start=None, closed=False):
1468 '''return a (possibly filtered) list of heads for the given branch
1468 '''return a (possibly filtered) list of heads for the given branch
1469
1469
1470 Heads are returned in topological order, from newest to oldest.
1470 Heads are returned in topological order, from newest to oldest.
1471 If branch is None, use the dirstate branch.
1471 If branch is None, use the dirstate branch.
1472 If start is not None, return only heads reachable from start.
1472 If start is not None, return only heads reachable from start.
1473 If closed is True, return heads that are marked as closed as well.
1473 If closed is True, return heads that are marked as closed as well.
1474 '''
1474 '''
1475 if branch is None:
1475 if branch is None:
1476 branch = self[None].branch()
1476 branch = self[None].branch()
1477 branches = self.branchmap()
1477 branches = self.branchmap()
1478 if branch not in branches:
1478 if branch not in branches:
1479 return []
1479 return []
1480 # the cache returns heads ordered lowest to highest
1480 # the cache returns heads ordered lowest to highest
1481 bheads = list(reversed(branches[branch]))
1481 bheads = list(reversed(branches[branch]))
1482 if start is not None:
1482 if start is not None:
1483 # filter out the heads that cannot be reached from startrev
1483 # filter out the heads that cannot be reached from startrev
1484 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1484 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1485 bheads = [h for h in bheads if h in fbheads]
1485 bheads = [h for h in bheads if h in fbheads]
1486 if not closed:
1486 if not closed:
1487 bheads = [h for h in bheads if
1487 bheads = [h for h in bheads if
1488 ('close' not in self.changelog.read(h)[5])]
1488 ('close' not in self.changelog.read(h)[5])]
1489 return bheads
1489 return bheads
1490
1490
1491 def branches(self, nodes):
1491 def branches(self, nodes):
1492 if not nodes:
1492 if not nodes:
1493 nodes = [self.changelog.tip()]
1493 nodes = [self.changelog.tip()]
1494 b = []
1494 b = []
1495 for n in nodes:
1495 for n in nodes:
1496 t = n
1496 t = n
1497 while True:
1497 while True:
1498 p = self.changelog.parents(n)
1498 p = self.changelog.parents(n)
1499 if p[1] != nullid or p[0] == nullid:
1499 if p[1] != nullid or p[0] == nullid:
1500 b.append((t, n, p[0], p[1]))
1500 b.append((t, n, p[0], p[1]))
1501 break
1501 break
1502 n = p[0]
1502 n = p[0]
1503 return b
1503 return b
1504
1504
1505 def between(self, pairs):
1505 def between(self, pairs):
1506 r = []
1506 r = []
1507
1507
1508 for top, bottom in pairs:
1508 for top, bottom in pairs:
1509 n, l, i = top, [], 0
1509 n, l, i = top, [], 0
1510 f = 1
1510 f = 1
1511
1511
1512 while n != bottom and n != nullid:
1512 while n != bottom and n != nullid:
1513 p = self.changelog.parents(n)[0]
1513 p = self.changelog.parents(n)[0]
1514 if i == f:
1514 if i == f:
1515 l.append(n)
1515 l.append(n)
1516 f = f * 2
1516 f = f * 2
1517 n = p
1517 n = p
1518 i += 1
1518 i += 1
1519
1519
1520 r.append(l)
1520 r.append(l)
1521
1521
1522 return r
1522 return r
1523
1523
    def pull(self, remote, heads=None, force=False):
        """Pull changes from *remote* into this repository.

        heads: optional list of remote nodes to limit the pull to
               (None pulls everything).
        force: proceed even if the repositories appear unrelated.

        Returns the integer result of addchangegroup() (0 when there was
        nothing to fetch).  Also synchronizes phase information with the
        remote, restricted to the subset of changesets actually pulled.
        """
        lock = self.lock()
        try:
            tmp = discovery.findcommonincoming(self, remote, heads=heads,
                                               force=force)
            common, fetch, rheads = tmp
            if not fetch:
                self.ui.status(_("no changes found\n"))
                added = []
                result = 0
            else:
                if heads is None and list(common) == [nullid]:
                    self.ui.status(_("requesting all changes\n"))
                elif heads is None and remote.capable('changegroupsubset'):
                    # issue1320, avoid a race if remote changed after discovery
                    heads = rheads

                if remote.capable('getbundle'):
                    cg = remote.getbundle('pull', common=common,
                                          heads=heads or rheads)
                elif heads is None:
                    cg = remote.changegroup(fetch, 'pull')
                elif not remote.capable('changegroupsubset'):
                    raise util.Abort(_("partial pull cannot be done because "
                                       "other repository doesn't support "
                                       "changegroupsubset."))
                else:
                    cg = remote.changegroupsubset(fetch, heads, 'pull')
                clstart = len(self.changelog)
                result = self.addchangegroup(cg, 'pull', remote.url())
                clend = len(self.changelog)
                # nodes appended to the changelog by this pull
                added = [self.changelog.node(r) for r in xrange(clstart, clend)]

            # compute target subset for phase movement: never move phases
            # outside of what this pull actually covered
            if heads is None:
                # We pulled every thing possible
                # sync on everything common
                subset = common + added
            else:
                # We pulled a specific subset
                # sync on this subset
                subset = heads

            # Get remote phases data from remote
            remotephases = remote.listkeys('phases')
            publishing = bool(remotephases.get('publishing', False))
            if remotephases and not publishing:
                # remote is new and unpublishing
                pheads, _dr = phases.analyzeremotephases(self, subset,
                                                         remotephases)
                phases.advanceboundary(self, phases.public, pheads)
                phases.advanceboundary(self, phases.draft, subset)
            else:
                # Remote is old or publishing all common changesets
                # should be seen as public
                phases.advanceboundary(self, phases.public, subset)
        finally:
            lock.release()

        return result
1576
1584
1577 def checkpush(self, force, revs):
1585 def checkpush(self, force, revs):
1578 """Extensions can override this function if additional checks have
1586 """Extensions can override this function if additional checks have
1579 to be performed before pushing, or call it if they override push
1587 to be performed before pushing, or call it if they override push
1580 command.
1588 command.
1581 """
1589 """
1582 pass
1590 pass
1583
1591
1584 def push(self, remote, force=False, revs=None, newbranch=False):
1592 def push(self, remote, force=False, revs=None, newbranch=False):
1585 '''Push outgoing changesets (limited by revs) from the current
1593 '''Push outgoing changesets (limited by revs) from the current
1586 repository to remote. Return an integer:
1594 repository to remote. Return an integer:
1587 - 0 means HTTP error *or* nothing to push
1595 - 0 means HTTP error *or* nothing to push
1588 - 1 means we pushed and remote head count is unchanged *or*
1596 - 1 means we pushed and remote head count is unchanged *or*
1589 we have outgoing changesets but refused to push
1597 we have outgoing changesets but refused to push
1590 - other values as described by addchangegroup()
1598 - other values as described by addchangegroup()
1591 '''
1599 '''
1592 # there are two ways to push to remote repo:
1600 # there are two ways to push to remote repo:
1593 #
1601 #
1594 # addchangegroup assumes local user can lock remote
1602 # addchangegroup assumes local user can lock remote
1595 # repo (local filesystem, old ssh servers).
1603 # repo (local filesystem, old ssh servers).
1596 #
1604 #
1597 # unbundle assumes local user cannot lock remote repo (new ssh
1605 # unbundle assumes local user cannot lock remote repo (new ssh
1598 # servers, http servers).
1606 # servers, http servers).
1599
1607
1600 # get local lock as we might write phase data
1608 # get local lock as we might write phase data
1601 locallock = self.lock()
1609 locallock = self.lock()
1602 try:
1610 try:
1603 self.checkpush(force, revs)
1611 self.checkpush(force, revs)
1604 lock = None
1612 lock = None
1605 unbundle = remote.capable('unbundle')
1613 unbundle = remote.capable('unbundle')
1606 if not unbundle:
1614 if not unbundle:
1607 lock = remote.lock()
1615 lock = remote.lock()
1608 try:
1616 try:
1609 # discovery
1617 # discovery
1610 fci = discovery.findcommonincoming
1618 fci = discovery.findcommonincoming
1611 commoninc = fci(self, remote, force=force)
1619 commoninc = fci(self, remote, force=force)
1612 common, inc, remoteheads = commoninc
1620 common, inc, remoteheads = commoninc
1613 fco = discovery.findcommonoutgoing
1621 fco = discovery.findcommonoutgoing
1614 outgoing = fco(self, remote, onlyheads=revs,
1622 outgoing = fco(self, remote, onlyheads=revs,
1615 commoninc=commoninc, force=force)
1623 commoninc=commoninc, force=force)
1616
1624
1617
1625
1618 if not outgoing.missing:
1626 if not outgoing.missing:
1619 # nothing to push
1627 # nothing to push
1620 if outgoing.excluded:
1628 if outgoing.excluded:
1621 msg = "no changes to push but %i secret changesets\n"
1629 msg = "no changes to push but %i secret changesets\n"
1622 self.ui.status(_(msg) % len(outgoing.excluded))
1630 self.ui.status(_(msg) % len(outgoing.excluded))
1623 else:
1631 else:
1624 self.ui.status(_("no changes found\n"))
1632 self.ui.status(_("no changes found\n"))
1625 ret = 1
1633 ret = 1
1626 else:
1634 else:
1627 # something to push
1635 # something to push
1628 if not force:
1636 if not force:
1629 discovery.checkheads(self, remote, outgoing,
1637 discovery.checkheads(self, remote, outgoing,
1630 remoteheads, newbranch)
1638 remoteheads, newbranch)
1631
1639
1632 # create a changegroup from local
1640 # create a changegroup from local
1633 if revs is None and not outgoing.excluded:
1641 if revs is None and not outgoing.excluded:
1634 # push everything,
1642 # push everything,
1635 # use the fast path, no race possible on push
1643 # use the fast path, no race possible on push
1636 cg = self._changegroup(outgoing.missing, 'push')
1644 cg = self._changegroup(outgoing.missing, 'push')
1637 else:
1645 else:
1638 cg = self.getlocalbundle('push', outgoing)
1646 cg = self.getlocalbundle('push', outgoing)
1639
1647
1640 # apply changegroup to remote
1648 # apply changegroup to remote
1641 if unbundle:
1649 if unbundle:
1642 # local repo finds heads on server, finds out what
1650 # local repo finds heads on server, finds out what
1643 # revs it must push. once revs transferred, if server
1651 # revs it must push. once revs transferred, if server
1644 # finds it has different heads (someone else won
1652 # finds it has different heads (someone else won
1645 # commit/push race), server aborts.
1653 # commit/push race), server aborts.
1646 if force:
1654 if force:
1647 remoteheads = ['force']
1655 remoteheads = ['force']
1648 # ssh: return remote's addchangegroup()
1656 # ssh: return remote's addchangegroup()
1649 # http: return remote's addchangegroup() or 0 for error
1657 # http: return remote's addchangegroup() or 0 for error
1650 ret = remote.unbundle(cg, remoteheads, 'push')
1658 ret = remote.unbundle(cg, remoteheads, 'push')
1651 else:
1659 else:
1652 # we return an integer indicating remote head count change
1660 # we return an integer indicating remote head count change
1653 ret = remote.addchangegroup(cg, 'push', self.url())
1661 ret = remote.addchangegroup(cg, 'push', self.url())
1654
1662
1655 cheads = outgoing.commonheads[:]
1656 if ret:
1663 if ret:
1657 # push succeed, synchonize common + pushed
1664 # push succeed, synchonize target of the push
1658 # this is a no-op if there was nothing to push
1665 cheads = outgoing.missingheads
1659 cheads += outgoing.missingheads
1666 elif revs is None:
1667 # All out push fails. synchronize all common
1668 cheads = outgoing.commonheads
1669 else:
1670 # I want cheads = heads(::missingheads and ::commonheads)
1671 # (missingheads is revs with secret changeset filtered out)
1672 #
1673 # This can be expressed as:
1674 # cheads = ( (missingheads and ::commonheads)
1675 # + (commonheads and ::missingheads))"
1676 # )
1677 #
1678 # while trying to push we already computed the following:
1679 # common = (::commonheads)
1680 # missing = ((commonheads::missingheads) - commonheads)
1681 #
1682 # We can pick:
1683 # * missingheads part of comon (::commonheads)
1684 common = set(outgoing.common)
1685 cheads = [n for node in revs if n in common]
1686 # and
1687 # * commonheads parents on missing
1688 rvset = repo.revset('%ln and parents(roots(%ln))',
1689 outgoing.commonheads,
1690 outgoing.missing)
1691 cheads.extend(c.node() for c in rvset)
1660 # even when we don't push, exchanging phase data is useful
1692 # even when we don't push, exchanging phase data is useful
1661 remotephases = remote.listkeys('phases')
1693 remotephases = remote.listkeys('phases')
1662 if not remotephases: # old server or public only repo
1694 if not remotephases: # old server or public only repo
1663 phases.advanceboundary(self, phases.public, cheads)
1695 phases.advanceboundary(self, phases.public, cheads)
1664 # don't push any phase data as there is nothing to push
1696 # don't push any phase data as there is nothing to push
1665 else:
1697 else:
1666 ana = phases.analyzeremotephases(self, cheads, remotephases)
1698 ana = phases.analyzeremotephases(self, cheads, remotephases)
1667 pheads, droots = ana
1699 pheads, droots = ana
1668 ### Apply remote phase on local
1700 ### Apply remote phase on local
1669 if remotephases.get('publishing', False):
1701 if remotephases.get('publishing', False):
1670 phases.advanceboundary(self, phases.public, cheads)
1702 phases.advanceboundary(self, phases.public, cheads)
1671 else: # publish = False
1703 else: # publish = False
1672 phases.advanceboundary(self, phases.public, pheads)
1704 phases.advanceboundary(self, phases.public, pheads)
1673 phases.advanceboundary(self, phases.draft, cheads)
1705 phases.advanceboundary(self, phases.draft, cheads)
1674 ### Apply local phase on remote
1706 ### Apply local phase on remote
1675
1707
1676 # Get the list of all revs draft on remote by public here.
1708 # Get the list of all revs draft on remote by public here.
1677 # XXX Beware that revset break if droots is not strictly
1709 # XXX Beware that revset break if droots is not strictly
1678 # XXX root we may want to ensure it is but it is costly
1710 # XXX root we may want to ensure it is but it is costly
1679 outdated = self.set('heads((%ln::%ln) and public())',
1711 outdated = self.set('heads((%ln::%ln) and public())',
1680 droots, cheads)
1712 droots, cheads)
1681 for newremotehead in outdated:
1713 for newremotehead in outdated:
1682 r = remote.pushkey('phases',
1714 r = remote.pushkey('phases',
1683 newremotehead.hex(),
1715 newremotehead.hex(),
1684 str(phases.draft),
1716 str(phases.draft),
1685 str(phases.public))
1717 str(phases.public))
1686 if not r:
1718 if not r:
1687 self.ui.warn(_('updating %s to public failed!\n')
1719 self.ui.warn(_('updating %s to public failed!\n')
1688 % newremotehead)
1720 % newremotehead)
1689 finally:
1721 finally:
1690 if lock is not None:
1722 if lock is not None:
1691 lock.release()
1723 lock.release()
1692 finally:
1724 finally:
1693 locallock.release()
1725 locallock.release()
1694
1726
1695 self.ui.debug("checking for updated bookmarks\n")
1727 self.ui.debug("checking for updated bookmarks\n")
1696 rb = remote.listkeys('bookmarks')
1728 rb = remote.listkeys('bookmarks')
1697 for k in rb.keys():
1729 for k in rb.keys():
1698 if k in self._bookmarks:
1730 if k in self._bookmarks:
1699 nr, nl = rb[k], hex(self._bookmarks[k])
1731 nr, nl = rb[k], hex(self._bookmarks[k])
1700 if nr in self:
1732 if nr in self:
1701 cr = self[nr]
1733 cr = self[nr]
1702 cl = self[nl]
1734 cl = self[nl]
1703 if cl in cr.descendants():
1735 if cl in cr.descendants():
1704 r = remote.pushkey('bookmarks', k, nr, nl)
1736 r = remote.pushkey('bookmarks', k, nr, nl)
1705 if r:
1737 if r:
1706 self.ui.status(_("updating bookmark %s\n") % k)
1738 self.ui.status(_("updating bookmark %s\n") % k)
1707 else:
1739 else:
1708 self.ui.warn(_('updating bookmark %s'
1740 self.ui.warn(_('updating bookmark %s'
1709 ' failed!\n') % k)
1741 ' failed!\n') % k)
1710
1742
1711 return ret
1743 return ret
1712
1744
1713 def changegroupinfo(self, nodes, source):
1745 def changegroupinfo(self, nodes, source):
1714 if self.ui.verbose or source == 'bundle':
1746 if self.ui.verbose or source == 'bundle':
1715 self.ui.status(_("%d changesets found\n") % len(nodes))
1747 self.ui.status(_("%d changesets found\n") % len(nodes))
1716 if self.ui.debugflag:
1748 if self.ui.debugflag:
1717 self.ui.debug("list of changesets:\n")
1749 self.ui.debug("list of changesets:\n")
1718 for node in nodes:
1750 for node in nodes:
1719 self.ui.debug("%s\n" % hex(node))
1751 self.ui.debug("%s\n" % hex(node))
1720
1752
1721 def changegroupsubset(self, bases, heads, source):
1753 def changegroupsubset(self, bases, heads, source):
1722 """Compute a changegroup consisting of all the nodes that are
1754 """Compute a changegroup consisting of all the nodes that are
1723 descendants of any of the bases and ancestors of any of the heads.
1755 descendants of any of the bases and ancestors of any of the heads.
1724 Return a chunkbuffer object whose read() method will return
1756 Return a chunkbuffer object whose read() method will return
1725 successive changegroup chunks.
1757 successive changegroup chunks.
1726
1758
1727 It is fairly complex as determining which filenodes and which
1759 It is fairly complex as determining which filenodes and which
1728 manifest nodes need to be included for the changeset to be complete
1760 manifest nodes need to be included for the changeset to be complete
1729 is non-trivial.
1761 is non-trivial.
1730
1762
1731 Another wrinkle is doing the reverse, figuring out which changeset in
1763 Another wrinkle is doing the reverse, figuring out which changeset in
1732 the changegroup a particular filenode or manifestnode belongs to.
1764 the changegroup a particular filenode or manifestnode belongs to.
1733 """
1765 """
1734 cl = self.changelog
1766 cl = self.changelog
1735 if not bases:
1767 if not bases:
1736 bases = [nullid]
1768 bases = [nullid]
1737 csets, bases, heads = cl.nodesbetween(bases, heads)
1769 csets, bases, heads = cl.nodesbetween(bases, heads)
1738 # We assume that all ancestors of bases are known
1770 # We assume that all ancestors of bases are known
1739 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1771 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1740 return self._changegroupsubset(common, csets, heads, source)
1772 return self._changegroupsubset(common, csets, heads, source)
1741
1773
1742 def getlocalbundle(self, source, outgoing):
1774 def getlocalbundle(self, source, outgoing):
1743 """Like getbundle, but taking a discovery.outgoing as an argument.
1775 """Like getbundle, but taking a discovery.outgoing as an argument.
1744
1776
1745 This is only implemented for local repos and reuses potentially
1777 This is only implemented for local repos and reuses potentially
1746 precomputed sets in outgoing."""
1778 precomputed sets in outgoing."""
1747 if not outgoing.missing:
1779 if not outgoing.missing:
1748 return None
1780 return None
1749 return self._changegroupsubset(outgoing.common,
1781 return self._changegroupsubset(outgoing.common,
1750 outgoing.missing,
1782 outgoing.missing,
1751 outgoing.missingheads,
1783 outgoing.missingheads,
1752 source)
1784 source)
1753
1785
1754 def getbundle(self, source, heads=None, common=None):
1786 def getbundle(self, source, heads=None, common=None):
1755 """Like changegroupsubset, but returns the set difference between the
1787 """Like changegroupsubset, but returns the set difference between the
1756 ancestors of heads and the ancestors common.
1788 ancestors of heads and the ancestors common.
1757
1789
1758 If heads is None, use the local heads. If common is None, use [nullid].
1790 If heads is None, use the local heads. If common is None, use [nullid].
1759
1791
1760 The nodes in common might not all be known locally due to the way the
1792 The nodes in common might not all be known locally due to the way the
1761 current discovery protocol works.
1793 current discovery protocol works.
1762 """
1794 """
1763 cl = self.changelog
1795 cl = self.changelog
1764 if common:
1796 if common:
1765 nm = cl.nodemap
1797 nm = cl.nodemap
1766 common = [n for n in common if n in nm]
1798 common = [n for n in common if n in nm]
1767 else:
1799 else:
1768 common = [nullid]
1800 common = [nullid]
1769 if not heads:
1801 if not heads:
1770 heads = cl.heads()
1802 heads = cl.heads()
1771 return self.getlocalbundle(source,
1803 return self.getlocalbundle(source,
1772 discovery.outgoing(cl, common, heads))
1804 discovery.outgoing(cl, common, heads))
1773
1805
    def _changegroupsubset(self, commonrevs, csets, heads, source):
        """Build a changegroup for csets, excluding everything whose
        linkrev is in commonrevs.

        commonrevs: set of changelog revs the recipient already has.
        csets: changelog nodes to bundle.
        heads: heads of the requested set (used for the fast-path check).
        Returns a changegroup.unbundle10 wrapping a lazy chunk generator.
        """
        cl = self.changelog
        mf = self.manifest
        mfs = {} # needed manifests (manifest node -> changelog node)
        fnodes = {} # needed file nodes (fname -> {filenode -> cl node})
        changedfiles = set()
        # mutable cells shared with the closures below
        fstate = ['', {}] # current filename and its filenode map
        count = [0] # progress counter

        # can we go through the fast path ?
        heads.sort()
        if heads == sorted(self.heads()):
            return self._changegroup(csets, source)

        # slow path
        self.hook('preoutgoing', throw=True, source=source)
        self.changegroupinfo(csets, source)

        # filter any nodes that claim to be part of the known set
        def prune(revlog, missing):
            return [n for n in missing
                    if revlog.linkrev(revlog.rev(n)) not in commonrevs]

        def lookup(revlog, x):
            # map a node of the given revlog back to its owning changelog
            # node, collecting manifest/file requirements as a side effect
            if revlog == cl:
                c = cl.read(x)
                changedfiles.update(c[3])
                mfs.setdefault(c[0], x)
                count[0] += 1
                self.ui.progress(_('bundling'), count[0],
                                 unit=_('changesets'), total=len(csets))
                return x
            elif revlog == mf:
                clnode = mfs[x]
                mdata = mf.readfast(x)
                for f in changedfiles:
                    if f in mdata:
                        fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
                count[0] += 1
                self.ui.progress(_('bundling'), count[0],
                                 unit=_('manifests'), total=len(mfs))
                return mfs[x]
            else:
                self.ui.progress(
                    _('bundling'), count[0], item=fstate[0],
                    unit=_('files'), total=len(changedfiles))
                return fstate[1][x]

        bundler = changegroup.bundle10(lookup)
        reorder = self.ui.config('bundle', 'reorder', 'auto')
        if reorder == 'auto':
            reorder = None
        else:
            reorder = util.parsebool(reorder)

        def gengroup():
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            for chunk in cl.group(csets, bundler, reorder=reorder):
                yield chunk
            self.ui.progress(_('bundling'), None)

            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            count[0] = 0
            for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
                yield chunk
            self.ui.progress(_('bundling'), None)

            mfs.clear()

            # Go through all our files in order sorted by name.
            count[0] = 0
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                fstate[0] = fname
                fstate[1] = fnodes.pop(fname, {})

                nodelist = prune(filerevlog, fstate[1])
                if nodelist:
                    count[0] += 1
                    yield bundler.fileheader(fname)
                    for chunk in filerevlog.group(nodelist, bundler, reorder):
                        yield chunk

            # Signal that no more groups are left.
            yield bundler.close()
            self.ui.progress(_('bundling'), None)

        if csets:
            self.hook('outgoing', node=hex(csets[0]), source=source)

        return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1870
1902
1871 def changegroup(self, basenodes, source):
1903 def changegroup(self, basenodes, source):
1872 # to avoid a race we use changegroupsubset() (issue1320)
1904 # to avoid a race we use changegroupsubset() (issue1320)
1873 return self.changegroupsubset(basenodes, self.heads(), source)
1905 return self.changegroupsubset(basenodes, self.heads(), source)
1874
1906
    def _changegroup(self, nodes, source):
        """Compute the changegroup of all nodes that we have that a recipient
        doesn't. Return a chunkbuffer object whose read() method will return
        successive changegroup chunks.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        nodes is the set of nodes to send"""

        cl = self.changelog
        mf = self.manifest
        mfs = {} # manifest node -> changelog node
        changedfiles = set()
        # mutable cells shared with the closures below
        fstate = [''] # current filename (for progress reporting)
        count = [0] # progress counter

        self.hook('preoutgoing', throw=True, source=source)
        self.changegroupinfo(nodes, source)

        # set of changelog revs being sent; used to pick matching revs in
        # the manifest and file revlogs via their linkrev
        revset = set([cl.rev(n) for n in nodes])

        def gennodelst(log):
            return [log.node(r) for r in log if log.linkrev(r) in revset]

        def lookup(revlog, x):
            # map a node of the given revlog back to its owning changelog
            # node, recording changed files for changelog entries
            if revlog == cl:
                c = cl.read(x)
                changedfiles.update(c[3])
                mfs.setdefault(c[0], x)
                count[0] += 1
                self.ui.progress(_('bundling'), count[0],
                                 unit=_('changesets'), total=len(nodes))
                return x
            elif revlog == mf:
                count[0] += 1
                self.ui.progress(_('bundling'), count[0],
                                 unit=_('manifests'), total=len(mfs))
                return cl.node(revlog.linkrev(revlog.rev(x)))
            else:
                self.ui.progress(
                    _('bundling'), count[0], item=fstate[0],
                    total=len(changedfiles), unit=_('files'))
                return cl.node(revlog.linkrev(revlog.rev(x)))

        bundler = changegroup.bundle10(lookup)
        reorder = self.ui.config('bundle', 'reorder', 'auto')
        if reorder == 'auto':
            reorder = None
        else:
            reorder = util.parsebool(reorder)

        def gengroup():
            '''yield a sequence of changegroup chunks (strings)'''
            # construct a list of all changed files

            for chunk in cl.group(nodes, bundler, reorder=reorder):
                yield chunk
            self.ui.progress(_('bundling'), None)

            count[0] = 0
            for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
                yield chunk
            self.ui.progress(_('bundling'), None)

            count[0] = 0
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                fstate[0] = fname
                nodelist = gennodelst(filerevlog)
                if nodelist:
                    count[0] += 1
                    yield bundler.fileheader(fname)
                    for chunk in filerevlog.group(nodelist, bundler, reorder):
                        yield chunk
            # signal that no more groups are left
            yield bundler.close()
            self.ui.progress(_('bundling'), None)

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1959
1991
1960 def addchangegroup(self, source, srctype, url, emptyok=False):
1992 def addchangegroup(self, source, srctype, url, emptyok=False):
1961 """Add the changegroup returned by source.read() to this repo.
1993 """Add the changegroup returned by source.read() to this repo.
1962 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1994 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1963 the URL of the repo where this changegroup is coming from.
1995 the URL of the repo where this changegroup is coming from.
1964
1996
1965 Return an integer summarizing the change to this repo:
1997 Return an integer summarizing the change to this repo:
1966 - nothing changed or no source: 0
1998 - nothing changed or no source: 0
1967 - more heads than before: 1+added heads (2..n)
1999 - more heads than before: 1+added heads (2..n)
1968 - fewer heads than before: -1-removed heads (-2..-n)
2000 - fewer heads than before: -1-removed heads (-2..-n)
1969 - number of heads stays the same: 1
2001 - number of heads stays the same: 1
1970 """
2002 """
1971 def csmap(x):
2003 def csmap(x):
1972 self.ui.debug("add changeset %s\n" % short(x))
2004 self.ui.debug("add changeset %s\n" % short(x))
1973 return len(cl)
2005 return len(cl)
1974
2006
1975 def revmap(x):
2007 def revmap(x):
1976 return cl.rev(x)
2008 return cl.rev(x)
1977
2009
1978 if not source:
2010 if not source:
1979 return 0
2011 return 0
1980
2012
1981 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2013 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1982
2014
1983 changesets = files = revisions = 0
2015 changesets = files = revisions = 0
1984 efiles = set()
2016 efiles = set()
1985
2017
1986 # write changelog data to temp files so concurrent readers will not see
2018 # write changelog data to temp files so concurrent readers will not see
1987 # inconsistent view
2019 # inconsistent view
1988 cl = self.changelog
2020 cl = self.changelog
1989 cl.delayupdate()
2021 cl.delayupdate()
1990 oldheads = cl.heads()
2022 oldheads = cl.heads()
1991
2023
1992 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2024 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1993 try:
2025 try:
1994 trp = weakref.proxy(tr)
2026 trp = weakref.proxy(tr)
1995 # pull off the changeset group
2027 # pull off the changeset group
1996 self.ui.status(_("adding changesets\n"))
2028 self.ui.status(_("adding changesets\n"))
1997 clstart = len(cl)
2029 clstart = len(cl)
1998 class prog(object):
2030 class prog(object):
1999 step = _('changesets')
2031 step = _('changesets')
2000 count = 1
2032 count = 1
2001 ui = self.ui
2033 ui = self.ui
2002 total = None
2034 total = None
2003 def __call__(self):
2035 def __call__(self):
2004 self.ui.progress(self.step, self.count, unit=_('chunks'),
2036 self.ui.progress(self.step, self.count, unit=_('chunks'),
2005 total=self.total)
2037 total=self.total)
2006 self.count += 1
2038 self.count += 1
2007 pr = prog()
2039 pr = prog()
2008 source.callback = pr
2040 source.callback = pr
2009
2041
2010 source.changelogheader()
2042 source.changelogheader()
2011 srccontent = cl.addgroup(source, csmap, trp)
2043 srccontent = cl.addgroup(source, csmap, trp)
2012 if not (srccontent or emptyok):
2044 if not (srccontent or emptyok):
2013 raise util.Abort(_("received changelog group is empty"))
2045 raise util.Abort(_("received changelog group is empty"))
2014 clend = len(cl)
2046 clend = len(cl)
2015 changesets = clend - clstart
2047 changesets = clend - clstart
2016 for c in xrange(clstart, clend):
2048 for c in xrange(clstart, clend):
2017 efiles.update(self[c].files())
2049 efiles.update(self[c].files())
2018 efiles = len(efiles)
2050 efiles = len(efiles)
2019 self.ui.progress(_('changesets'), None)
2051 self.ui.progress(_('changesets'), None)
2020
2052
2021 # pull off the manifest group
2053 # pull off the manifest group
2022 self.ui.status(_("adding manifests\n"))
2054 self.ui.status(_("adding manifests\n"))
2023 pr.step = _('manifests')
2055 pr.step = _('manifests')
2024 pr.count = 1
2056 pr.count = 1
2025 pr.total = changesets # manifests <= changesets
2057 pr.total = changesets # manifests <= changesets
2026 # no need to check for empty manifest group here:
2058 # no need to check for empty manifest group here:
2027 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2059 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2028 # no new manifest will be created and the manifest group will
2060 # no new manifest will be created and the manifest group will
2029 # be empty during the pull
2061 # be empty during the pull
2030 source.manifestheader()
2062 source.manifestheader()
2031 self.manifest.addgroup(source, revmap, trp)
2063 self.manifest.addgroup(source, revmap, trp)
2032 self.ui.progress(_('manifests'), None)
2064 self.ui.progress(_('manifests'), None)
2033
2065
2034 needfiles = {}
2066 needfiles = {}
2035 if self.ui.configbool('server', 'validate', default=False):
2067 if self.ui.configbool('server', 'validate', default=False):
2036 # validate incoming csets have their manifests
2068 # validate incoming csets have their manifests
2037 for cset in xrange(clstart, clend):
2069 for cset in xrange(clstart, clend):
2038 mfest = self.changelog.read(self.changelog.node(cset))[0]
2070 mfest = self.changelog.read(self.changelog.node(cset))[0]
2039 mfest = self.manifest.readdelta(mfest)
2071 mfest = self.manifest.readdelta(mfest)
2040 # store file nodes we must see
2072 # store file nodes we must see
2041 for f, n in mfest.iteritems():
2073 for f, n in mfest.iteritems():
2042 needfiles.setdefault(f, set()).add(n)
2074 needfiles.setdefault(f, set()).add(n)
2043
2075
2044 # process the files
2076 # process the files
2045 self.ui.status(_("adding file changes\n"))
2077 self.ui.status(_("adding file changes\n"))
2046 pr.step = _('files')
2078 pr.step = _('files')
2047 pr.count = 1
2079 pr.count = 1
2048 pr.total = efiles
2080 pr.total = efiles
2049 source.callback = None
2081 source.callback = None
2050
2082
2051 while True:
2083 while True:
2052 chunkdata = source.filelogheader()
2084 chunkdata = source.filelogheader()
2053 if not chunkdata:
2085 if not chunkdata:
2054 break
2086 break
2055 f = chunkdata["filename"]
2087 f = chunkdata["filename"]
2056 self.ui.debug("adding %s revisions\n" % f)
2088 self.ui.debug("adding %s revisions\n" % f)
2057 pr()
2089 pr()
2058 fl = self.file(f)
2090 fl = self.file(f)
2059 o = len(fl)
2091 o = len(fl)
2060 if not fl.addgroup(source, revmap, trp):
2092 if not fl.addgroup(source, revmap, trp):
2061 raise util.Abort(_("received file revlog group is empty"))
2093 raise util.Abort(_("received file revlog group is empty"))
2062 revisions += len(fl) - o
2094 revisions += len(fl) - o
2063 files += 1
2095 files += 1
2064 if f in needfiles:
2096 if f in needfiles:
2065 needs = needfiles[f]
2097 needs = needfiles[f]
2066 for new in xrange(o, len(fl)):
2098 for new in xrange(o, len(fl)):
2067 n = fl.node(new)
2099 n = fl.node(new)
2068 if n in needs:
2100 if n in needs:
2069 needs.remove(n)
2101 needs.remove(n)
2070 if not needs:
2102 if not needs:
2071 del needfiles[f]
2103 del needfiles[f]
2072 self.ui.progress(_('files'), None)
2104 self.ui.progress(_('files'), None)
2073
2105
2074 for f, needs in needfiles.iteritems():
2106 for f, needs in needfiles.iteritems():
2075 fl = self.file(f)
2107 fl = self.file(f)
2076 for n in needs:
2108 for n in needs:
2077 try:
2109 try:
2078 fl.rev(n)
2110 fl.rev(n)
2079 except error.LookupError:
2111 except error.LookupError:
2080 raise util.Abort(
2112 raise util.Abort(
2081 _('missing file data for %s:%s - run hg verify') %
2113 _('missing file data for %s:%s - run hg verify') %
2082 (f, hex(n)))
2114 (f, hex(n)))
2083
2115
2084 dh = 0
2116 dh = 0
2085 if oldheads:
2117 if oldheads:
2086 heads = cl.heads()
2118 heads = cl.heads()
2087 dh = len(heads) - len(oldheads)
2119 dh = len(heads) - len(oldheads)
2088 for h in heads:
2120 for h in heads:
2089 if h not in oldheads and 'close' in self[h].extra():
2121 if h not in oldheads and 'close' in self[h].extra():
2090 dh -= 1
2122 dh -= 1
2091 htext = ""
2123 htext = ""
2092 if dh:
2124 if dh:
2093 htext = _(" (%+d heads)") % dh
2125 htext = _(" (%+d heads)") % dh
2094
2126
2095 self.ui.status(_("added %d changesets"
2127 self.ui.status(_("added %d changesets"
2096 " with %d changes to %d files%s\n")
2128 " with %d changes to %d files%s\n")
2097 % (changesets, revisions, files, htext))
2129 % (changesets, revisions, files, htext))
2098
2130
2099 if changesets > 0:
2131 if changesets > 0:
2100 p = lambda: cl.writepending() and self.root or ""
2132 p = lambda: cl.writepending() and self.root or ""
2101 self.hook('pretxnchangegroup', throw=True,
2133 self.hook('pretxnchangegroup', throw=True,
2102 node=hex(cl.node(clstart)), source=srctype,
2134 node=hex(cl.node(clstart)), source=srctype,
2103 url=url, pending=p)
2135 url=url, pending=p)
2104
2136
2105 added = [cl.node(r) for r in xrange(clstart, clend)]
2137 added = [cl.node(r) for r in xrange(clstart, clend)]
2106 publishing = self.ui.configbool('phases', 'publish', True)
2138 publishing = self.ui.configbool('phases', 'publish', True)
2107 if srctype == 'push':
2139 if srctype == 'push':
2108 # Old server can not push the boundary themself.
2140 # Old server can not push the boundary themself.
2109 # New server won't push the boundary if changeset already
2141 # New server won't push the boundary if changeset already
2110 # existed locally as secrete
2142 # existed locally as secrete
2111 #
2143 #
2112 # We should not use added here but the list of all change in
2144 # We should not use added here but the list of all change in
2113 # the bundle
2145 # the bundle
2114 if publishing:
2146 if publishing:
2115 phases.advanceboundary(self, phases.public, srccontent)
2147 phases.advanceboundary(self, phases.public, srccontent)
2116 else:
2148 else:
2117 phases.advanceboundary(self, phases.draft, srccontent)
2149 phases.advanceboundary(self, phases.draft, srccontent)
2118 phases.retractboundary(self, phases.draft, added)
2150 phases.retractboundary(self, phases.draft, added)
2119 elif srctype != 'strip':
2151 elif srctype != 'strip':
2120 # publishing only alter behavior during push
2152 # publishing only alter behavior during push
2121 #
2153 #
2122 # strip should not touch boundary at all
2154 # strip should not touch boundary at all
2123 phases.retractboundary(self, phases.draft, added)
2155 phases.retractboundary(self, phases.draft, added)
2124
2156
2125 # make changelog see real files again
2157 # make changelog see real files again
2126 cl.finalize(trp)
2158 cl.finalize(trp)
2127
2159
2128 tr.close()
2160 tr.close()
2129
2161
2130 if changesets > 0:
2162 if changesets > 0:
2131 def runhooks():
2163 def runhooks():
2132 # forcefully update the on-disk branch cache
2164 # forcefully update the on-disk branch cache
2133 self.ui.debug("updating the branch cache\n")
2165 self.ui.debug("updating the branch cache\n")
2134 self.updatebranchcache()
2166 self.updatebranchcache()
2135 self.hook("changegroup", node=hex(cl.node(clstart)),
2167 self.hook("changegroup", node=hex(cl.node(clstart)),
2136 source=srctype, url=url)
2168 source=srctype, url=url)
2137
2169
2138 for n in added:
2170 for n in added:
2139 self.hook("incoming", node=hex(n), source=srctype,
2171 self.hook("incoming", node=hex(n), source=srctype,
2140 url=url)
2172 url=url)
2141 self._afterlock(runhooks)
2173 self._afterlock(runhooks)
2142
2174
2143 finally:
2175 finally:
2144 tr.release()
2176 tr.release()
2145 # never return 0 here:
2177 # never return 0 here:
2146 if dh < 0:
2178 if dh < 0:
2147 return dh - 1
2179 return dh - 1
2148 else:
2180 else:
2149 return dh + 1
2181 return dh + 1
2150
2182
2151 def stream_in(self, remote, requirements):
2183 def stream_in(self, remote, requirements):
2152 lock = self.lock()
2184 lock = self.lock()
2153 try:
2185 try:
2154 fp = remote.stream_out()
2186 fp = remote.stream_out()
2155 l = fp.readline()
2187 l = fp.readline()
2156 try:
2188 try:
2157 resp = int(l)
2189 resp = int(l)
2158 except ValueError:
2190 except ValueError:
2159 raise error.ResponseError(
2191 raise error.ResponseError(
2160 _('Unexpected response from remote server:'), l)
2192 _('Unexpected response from remote server:'), l)
2161 if resp == 1:
2193 if resp == 1:
2162 raise util.Abort(_('operation forbidden by server'))
2194 raise util.Abort(_('operation forbidden by server'))
2163 elif resp == 2:
2195 elif resp == 2:
2164 raise util.Abort(_('locking the remote repository failed'))
2196 raise util.Abort(_('locking the remote repository failed'))
2165 elif resp != 0:
2197 elif resp != 0:
2166 raise util.Abort(_('the server sent an unknown error code'))
2198 raise util.Abort(_('the server sent an unknown error code'))
2167 self.ui.status(_('streaming all changes\n'))
2199 self.ui.status(_('streaming all changes\n'))
2168 l = fp.readline()
2200 l = fp.readline()
2169 try:
2201 try:
2170 total_files, total_bytes = map(int, l.split(' ', 1))
2202 total_files, total_bytes = map(int, l.split(' ', 1))
2171 except (ValueError, TypeError):
2203 except (ValueError, TypeError):
2172 raise error.ResponseError(
2204 raise error.ResponseError(
2173 _('Unexpected response from remote server:'), l)
2205 _('Unexpected response from remote server:'), l)
2174 self.ui.status(_('%d files to transfer, %s of data\n') %
2206 self.ui.status(_('%d files to transfer, %s of data\n') %
2175 (total_files, util.bytecount(total_bytes)))
2207 (total_files, util.bytecount(total_bytes)))
2176 start = time.time()
2208 start = time.time()
2177 for i in xrange(total_files):
2209 for i in xrange(total_files):
2178 # XXX doesn't support '\n' or '\r' in filenames
2210 # XXX doesn't support '\n' or '\r' in filenames
2179 l = fp.readline()
2211 l = fp.readline()
2180 try:
2212 try:
2181 name, size = l.split('\0', 1)
2213 name, size = l.split('\0', 1)
2182 size = int(size)
2214 size = int(size)
2183 except (ValueError, TypeError):
2215 except (ValueError, TypeError):
2184 raise error.ResponseError(
2216 raise error.ResponseError(
2185 _('Unexpected response from remote server:'), l)
2217 _('Unexpected response from remote server:'), l)
2186 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2218 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2187 # for backwards compat, name was partially encoded
2219 # for backwards compat, name was partially encoded
2188 ofp = self.sopener(store.decodedir(name), 'w')
2220 ofp = self.sopener(store.decodedir(name), 'w')
2189 for chunk in util.filechunkiter(fp, limit=size):
2221 for chunk in util.filechunkiter(fp, limit=size):
2190 ofp.write(chunk)
2222 ofp.write(chunk)
2191 ofp.close()
2223 ofp.close()
2192 elapsed = time.time() - start
2224 elapsed = time.time() - start
2193 if elapsed <= 0:
2225 if elapsed <= 0:
2194 elapsed = 0.001
2226 elapsed = 0.001
2195 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2227 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2196 (util.bytecount(total_bytes), elapsed,
2228 (util.bytecount(total_bytes), elapsed,
2197 util.bytecount(total_bytes / elapsed)))
2229 util.bytecount(total_bytes / elapsed)))
2198
2230
2199 # new requirements = old non-format requirements + new format-related
2231 # new requirements = old non-format requirements + new format-related
2200 # requirements from the streamed-in repository
2232 # requirements from the streamed-in repository
2201 requirements.update(set(self.requirements) - self.supportedformats)
2233 requirements.update(set(self.requirements) - self.supportedformats)
2202 self._applyrequirements(requirements)
2234 self._applyrequirements(requirements)
2203 self._writerequirements()
2235 self._writerequirements()
2204
2236
2205 self.invalidate()
2237 self.invalidate()
2206 return len(self.heads()) + 1
2238 return len(self.heads()) + 1
2207 finally:
2239 finally:
2208 lock.release()
2240 lock.release()
2209
2241
2210 def clone(self, remote, heads=[], stream=False):
2242 def clone(self, remote, heads=[], stream=False):
2211 '''clone remote repository.
2243 '''clone remote repository.
2212
2244
2213 keyword arguments:
2245 keyword arguments:
2214 heads: list of revs to clone (forces use of pull)
2246 heads: list of revs to clone (forces use of pull)
2215 stream: use streaming clone if possible'''
2247 stream: use streaming clone if possible'''
2216
2248
2217 # now, all clients that can request uncompressed clones can
2249 # now, all clients that can request uncompressed clones can
2218 # read repo formats supported by all servers that can serve
2250 # read repo formats supported by all servers that can serve
2219 # them.
2251 # them.
2220
2252
2221 # if revlog format changes, client will have to check version
2253 # if revlog format changes, client will have to check version
2222 # and format flags on "stream" capability, and use
2254 # and format flags on "stream" capability, and use
2223 # uncompressed only if compatible.
2255 # uncompressed only if compatible.
2224
2256
2225 if stream and not heads:
2257 if stream and not heads:
2226 # 'stream' means remote revlog format is revlogv1 only
2258 # 'stream' means remote revlog format is revlogv1 only
2227 if remote.capable('stream'):
2259 if remote.capable('stream'):
2228 return self.stream_in(remote, set(('revlogv1',)))
2260 return self.stream_in(remote, set(('revlogv1',)))
2229 # otherwise, 'streamreqs' contains the remote revlog format
2261 # otherwise, 'streamreqs' contains the remote revlog format
2230 streamreqs = remote.capable('streamreqs')
2262 streamreqs = remote.capable('streamreqs')
2231 if streamreqs:
2263 if streamreqs:
2232 streamreqs = set(streamreqs.split(','))
2264 streamreqs = set(streamreqs.split(','))
2233 # if we support it, stream in and adjust our requirements
2265 # if we support it, stream in and adjust our requirements
2234 if not streamreqs - self.supportedformats:
2266 if not streamreqs - self.supportedformats:
2235 return self.stream_in(remote, streamreqs)
2267 return self.stream_in(remote, streamreqs)
2236 return self.pull(remote, heads)
2268 return self.pull(remote, heads)
2237
2269
2238 def pushkey(self, namespace, key, old, new):
2270 def pushkey(self, namespace, key, old, new):
2239 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2271 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2240 old=old, new=new)
2272 old=old, new=new)
2241 ret = pushkey.push(self, namespace, key, old, new)
2273 ret = pushkey.push(self, namespace, key, old, new)
2242 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2274 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2243 ret=ret)
2275 ret=ret)
2244 return ret
2276 return ret
2245
2277
2246 def listkeys(self, namespace):
2278 def listkeys(self, namespace):
2247 self.hook('prelistkeys', throw=True, namespace=namespace)
2279 self.hook('prelistkeys', throw=True, namespace=namespace)
2248 values = pushkey.list(self, namespace)
2280 values = pushkey.list(self, namespace)
2249 self.hook('listkeys', namespace=namespace, values=values)
2281 self.hook('listkeys', namespace=namespace, values=values)
2250 return values
2282 return values
2251
2283
2252 def debugwireargs(self, one, two, three=None, four=None, five=None):
2284 def debugwireargs(self, one, two, three=None, four=None, five=None):
2253 '''used to test argument passing over the wire'''
2285 '''used to test argument passing over the wire'''
2254 return "%s %s %s %s %s" % (one, two, three, four, five)
2286 return "%s %s %s %s %s" % (one, two, three, four, five)
2255
2287
2256 def savecommitmessage(self, text):
2288 def savecommitmessage(self, text):
2257 fp = self.opener('last-message.txt', 'wb')
2289 fp = self.opener('last-message.txt', 'wb')
2258 try:
2290 try:
2259 fp.write(text)
2291 fp.write(text)
2260 finally:
2292 finally:
2261 fp.close()
2293 fp.close()
2262 return self.pathto(fp.name[len(self.root)+1:])
2294 return self.pathto(fp.name[len(self.root)+1:])
2263
2295
2264 # used to avoid circular references so destructors work
2296 # used to avoid circular references so destructors work
2265 def aftertrans(files):
2297 def aftertrans(files):
2266 renamefiles = [tuple(t) for t in files]
2298 renamefiles = [tuple(t) for t in files]
2267 def a():
2299 def a():
2268 for src, dest in renamefiles:
2300 for src, dest in renamefiles:
2269 util.rename(src, dest)
2301 util.rename(src, dest)
2270 return a
2302 return a
2271
2303
2272 def undoname(fn):
2304 def undoname(fn):
2273 base, name = os.path.split(fn)
2305 base, name = os.path.split(fn)
2274 assert name.startswith('journal')
2306 assert name.startswith('journal')
2275 return os.path.join(base, name.replace('journal', 'undo', 1))
2307 return os.path.join(base, name.replace('journal', 'undo', 1))
2276
2308
2277 def instance(ui, path, create):
2309 def instance(ui, path, create):
2278 return localrepository(ui, util.urllocalpath(path), create)
2310 return localrepository(ui, util.urllocalpath(path), create)
2279
2311
2280 def islocal(path):
2312 def islocal(path):
2281 return True
2313 return True
@@ -1,581 +1,580 b''
1 $ "$TESTDIR/hghave" system-sh || exit 80
1 $ "$TESTDIR/hghave" system-sh || exit 80
2
2
3 commit hooks can see env vars
3 commit hooks can see env vars
4
4
5 $ hg init a
5 $ hg init a
6 $ cd a
6 $ cd a
7 $ echo "[hooks]" > .hg/hgrc
7 $ echo "[hooks]" > .hg/hgrc
8 $ echo 'commit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit' >> .hg/hgrc
8 $ echo 'commit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit' >> .hg/hgrc
9 $ echo 'commit.b = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit.b' >> .hg/hgrc
9 $ echo 'commit.b = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit.b' >> .hg/hgrc
10 $ echo 'precommit = unset HG_LOCAL HG_NODE HG_TAG; python "$TESTDIR"/printenv.py precommit' >> .hg/hgrc
10 $ echo 'precommit = unset HG_LOCAL HG_NODE HG_TAG; python "$TESTDIR"/printenv.py precommit' >> .hg/hgrc
11 $ echo 'pretxncommit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py pretxncommit' >> .hg/hgrc
11 $ echo 'pretxncommit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py pretxncommit' >> .hg/hgrc
12 $ echo 'pretxncommit.tip = hg -q tip' >> .hg/hgrc
12 $ echo 'pretxncommit.tip = hg -q tip' >> .hg/hgrc
13 $ echo 'pre-identify = python "$TESTDIR"/printenv.py pre-identify 1' >> .hg/hgrc
13 $ echo 'pre-identify = python "$TESTDIR"/printenv.py pre-identify 1' >> .hg/hgrc
14 $ echo 'pre-cat = python "$TESTDIR"/printenv.py pre-cat' >> .hg/hgrc
14 $ echo 'pre-cat = python "$TESTDIR"/printenv.py pre-cat' >> .hg/hgrc
15 $ echo 'post-cat = python "$TESTDIR"/printenv.py post-cat' >> .hg/hgrc
15 $ echo 'post-cat = python "$TESTDIR"/printenv.py post-cat' >> .hg/hgrc
16 $ echo a > a
16 $ echo a > a
17 $ hg add a
17 $ hg add a
18 $ hg commit -m a
18 $ hg commit -m a
19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
21 0:cb9a9f314b8b
21 0:cb9a9f314b8b
22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
24
24
25 $ hg clone . ../b
25 $ hg clone . ../b
26 updating to branch default
26 updating to branch default
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 $ cd ../b
28 $ cd ../b
29
29
30 changegroup hooks can see env vars
30 changegroup hooks can see env vars
31
31
32 $ echo '[hooks]' > .hg/hgrc
32 $ echo '[hooks]' > .hg/hgrc
33 $ echo 'prechangegroup = python "$TESTDIR"/printenv.py prechangegroup' >> .hg/hgrc
33 $ echo 'prechangegroup = python "$TESTDIR"/printenv.py prechangegroup' >> .hg/hgrc
34 $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc
34 $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc
35 $ echo 'incoming = python "$TESTDIR"/printenv.py incoming' >> .hg/hgrc
35 $ echo 'incoming = python "$TESTDIR"/printenv.py incoming' >> .hg/hgrc
36
36
37 pretxncommit and commit hooks can see both parents of merge
37 pretxncommit and commit hooks can see both parents of merge
38
38
39 $ cd ../a
39 $ cd ../a
40 $ echo b >> a
40 $ echo b >> a
41 $ hg commit -m a1 -d "1 0"
41 $ hg commit -m a1 -d "1 0"
42 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
42 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
43 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
43 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
44 1:ab228980c14d
44 1:ab228980c14d
45 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
45 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
46 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
46 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
47 $ hg update -C 0
47 $ hg update -C 0
48 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
48 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
49 $ echo b > b
49 $ echo b > b
50 $ hg add b
50 $ hg add b
51 $ hg commit -m b -d '1 0'
51 $ hg commit -m b -d '1 0'
52 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
52 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
53 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
53 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
54 2:ee9deb46ab31
54 2:ee9deb46ab31
55 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
55 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
56 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
56 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
57 created new head
57 created new head
58 $ hg merge 1
58 $ hg merge 1
59 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
59 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
60 (branch merge, don't forget to commit)
60 (branch merge, don't forget to commit)
61 $ hg commit -m merge -d '2 0'
61 $ hg commit -m merge -d '2 0'
62 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
62 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
63 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
63 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
64 3:07f3376c1e65
64 3:07f3376c1e65
65 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
65 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
66 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
66 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
67
67
68 test generic hooks
68 test generic hooks
69
69
70 $ hg id
70 $ hg id
71 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
71 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
72 warning: pre-identify hook exited with status 1
72 warning: pre-identify hook exited with status 1
73 [1]
73 [1]
74 $ hg cat b
74 $ hg cat b
75 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
75 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
76 b
76 b
77 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
77 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
78
78
79 $ cd ../b
79 $ cd ../b
80 $ hg pull ../a
80 $ hg pull ../a
81 pulling from ../a
81 pulling from ../a
82 searching for changes
82 searching for changes
83 prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
83 prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
84 adding changesets
84 adding changesets
85 adding manifests
85 adding manifests
86 adding file changes
86 adding file changes
87 added 3 changesets with 2 changes to 2 files
87 added 3 changesets with 2 changes to 2 files
88 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
88 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
89 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
89 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
90 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
90 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
91 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
91 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
92 (run 'hg update' to get a working copy)
92 (run 'hg update' to get a working copy)
93
93
94 tag hooks can see env vars
94 tag hooks can see env vars
95
95
96 $ cd ../a
96 $ cd ../a
97 $ echo 'pretag = python "$TESTDIR"/printenv.py pretag' >> .hg/hgrc
97 $ echo 'pretag = python "$TESTDIR"/printenv.py pretag' >> .hg/hgrc
98 $ echo 'tag = unset HG_PARENT1 HG_PARENT2; python "$TESTDIR"/printenv.py tag' >> .hg/hgrc
98 $ echo 'tag = unset HG_PARENT1 HG_PARENT2; python "$TESTDIR"/printenv.py tag' >> .hg/hgrc
99 $ hg tag -d '3 0' a
99 $ hg tag -d '3 0' a
100 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
100 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
101 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
101 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
102 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
102 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
103 4:539e4b31b6dc
103 4:539e4b31b6dc
104 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
104 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
105 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
105 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
106 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
106 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
107 $ hg tag -l la
107 $ hg tag -l la
108 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
108 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
109 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
109 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
110
110
111 pretag hook can forbid tagging
111 pretag hook can forbid tagging
112
112
113 $ echo 'pretag.forbid = python "$TESTDIR"/printenv.py pretag.forbid 1' >> .hg/hgrc
113 $ echo 'pretag.forbid = python "$TESTDIR"/printenv.py pretag.forbid 1' >> .hg/hgrc
114 $ hg tag -d '4 0' fa
114 $ hg tag -d '4 0' fa
115 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
115 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
116 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
116 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
117 abort: pretag.forbid hook exited with status 1
117 abort: pretag.forbid hook exited with status 1
118 [255]
118 [255]
119 $ hg tag -l fla
119 $ hg tag -l fla
120 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
120 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
121 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
121 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
122 abort: pretag.forbid hook exited with status 1
122 abort: pretag.forbid hook exited with status 1
123 [255]
123 [255]
124
124
125 pretxncommit hook can see changeset, can roll back txn, changeset no
125 pretxncommit hook can see changeset, can roll back txn, changeset no
126 more there after
126 more there after
127
127
128 $ echo 'pretxncommit.forbid0 = hg tip -q' >> .hg/hgrc
128 $ echo 'pretxncommit.forbid0 = hg tip -q' >> .hg/hgrc
129 $ echo 'pretxncommit.forbid1 = python "$TESTDIR"/printenv.py pretxncommit.forbid 1' >> .hg/hgrc
129 $ echo 'pretxncommit.forbid1 = python "$TESTDIR"/printenv.py pretxncommit.forbid 1' >> .hg/hgrc
130 $ echo z > z
130 $ echo z > z
131 $ hg add z
131 $ hg add z
132 $ hg -q tip
132 $ hg -q tip
133 4:539e4b31b6dc
133 4:539e4b31b6dc
134 $ hg commit -m 'fail' -d '4 0'
134 $ hg commit -m 'fail' -d '4 0'
135 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
135 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
136 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
136 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
137 5:6f611f8018c1
137 5:6f611f8018c1
138 5:6f611f8018c1
138 5:6f611f8018c1
139 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
139 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
140 transaction abort!
140 transaction abort!
141 rollback completed
141 rollback completed
142 abort: pretxncommit.forbid1 hook exited with status 1
142 abort: pretxncommit.forbid1 hook exited with status 1
143 [255]
143 [255]
144 $ hg -q tip
144 $ hg -q tip
145 4:539e4b31b6dc
145 4:539e4b31b6dc
146
146
147 precommit hook can prevent commit
147 precommit hook can prevent commit
148
148
149 $ echo 'precommit.forbid = python "$TESTDIR"/printenv.py precommit.forbid 1' >> .hg/hgrc
149 $ echo 'precommit.forbid = python "$TESTDIR"/printenv.py precommit.forbid 1' >> .hg/hgrc
150 $ hg commit -m 'fail' -d '4 0'
150 $ hg commit -m 'fail' -d '4 0'
151 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
151 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
152 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
152 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
153 abort: precommit.forbid hook exited with status 1
153 abort: precommit.forbid hook exited with status 1
154 [255]
154 [255]
155 $ hg -q tip
155 $ hg -q tip
156 4:539e4b31b6dc
156 4:539e4b31b6dc
157
157
158 preupdate hook can prevent update
158 preupdate hook can prevent update
159
159
160 $ echo 'preupdate = python "$TESTDIR"/printenv.py preupdate' >> .hg/hgrc
160 $ echo 'preupdate = python "$TESTDIR"/printenv.py preupdate' >> .hg/hgrc
161 $ hg update 1
161 $ hg update 1
162 preupdate hook: HG_PARENT1=ab228980c14d
162 preupdate hook: HG_PARENT1=ab228980c14d
163 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
163 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
164
164
165 update hook
165 update hook
166
166
167 $ echo 'update = python "$TESTDIR"/printenv.py update' >> .hg/hgrc
167 $ echo 'update = python "$TESTDIR"/printenv.py update' >> .hg/hgrc
168 $ hg update
168 $ hg update
169 preupdate hook: HG_PARENT1=539e4b31b6dc
169 preupdate hook: HG_PARENT1=539e4b31b6dc
170 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
170 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
171 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
171 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
172
172
173 pushkey hook
173 pushkey hook
174
174
175 $ echo 'pushkey = python "$TESTDIR"/printenv.py pushkey' >> .hg/hgrc
175 $ echo 'pushkey = python "$TESTDIR"/printenv.py pushkey' >> .hg/hgrc
176 $ cd ../b
176 $ cd ../b
177 $ hg bookmark -r null foo
177 $ hg bookmark -r null foo
178 $ hg push -B foo ../a
178 $ hg push -B foo ../a
179 pushing to ../a
179 pushing to ../a
180 searching for changes
180 searching for changes
181 no changes found
181 no changes found
182 pushkey hook: HG_KEY=07f3376c1e655977439df2a814e3cc14b27abac2 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 HG_RET=1
183 exporting bookmark foo
182 exporting bookmark foo
184 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
183 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
185 $ cd ../a
184 $ cd ../a
186
185
187 listkeys hook
186 listkeys hook
188
187
189 $ echo 'listkeys = python "$TESTDIR"/printenv.py listkeys' >> .hg/hgrc
188 $ echo 'listkeys = python "$TESTDIR"/printenv.py listkeys' >> .hg/hgrc
190 $ hg bookmark -r null bar
189 $ hg bookmark -r null bar
191 $ cd ../b
190 $ cd ../b
192 $ hg pull -B bar ../a
191 $ hg pull -B bar ../a
193 pulling from ../a
192 pulling from ../a
194 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
193 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
195 no changes found
194 no changes found
196 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10': '1', 'publishing': 'True'}
195 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
197 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
196 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
198 importing bookmark bar
197 importing bookmark bar
199 $ cd ../a
198 $ cd ../a
200
199
201 test that prepushkey can prevent incoming keys
200 test that prepushkey can prevent incoming keys
202
201
203 $ echo 'prepushkey = python "$TESTDIR"/printenv.py prepushkey.forbid 1' >> .hg/hgrc
202 $ echo 'prepushkey = python "$TESTDIR"/printenv.py prepushkey.forbid 1' >> .hg/hgrc
204 $ cd ../b
203 $ cd ../b
205 $ hg bookmark -r null baz
204 $ hg bookmark -r null baz
206 $ hg push -B baz ../a
205 $ hg push -B baz ../a
207 pushing to ../a
206 pushing to ../a
208 searching for changes
207 searching for changes
209 no changes found
208 no changes found
210 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10': '1', 'publishing': 'True'}
209 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
211 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
210 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
212 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
211 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
213 exporting bookmark baz
212 exporting bookmark baz
214 prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
213 prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
215 abort: prepushkey hook exited with status 1
214 abort: prepushkey hook exited with status 1
216 [255]
215 [255]
217 $ cd ../a
216 $ cd ../a
218
217
219 test that prelistkeys can prevent listing keys
218 test that prelistkeys can prevent listing keys
220
219
221 $ echo 'prelistkeys = python "$TESTDIR"/printenv.py prelistkeys.forbid 1' >> .hg/hgrc
220 $ echo 'prelistkeys = python "$TESTDIR"/printenv.py prelistkeys.forbid 1' >> .hg/hgrc
222 $ hg bookmark -r null quux
221 $ hg bookmark -r null quux
223 $ cd ../b
222 $ cd ../b
224 $ hg pull -B quux ../a
223 $ hg pull -B quux ../a
225 pulling from ../a
224 pulling from ../a
226 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
225 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
227 abort: prelistkeys hook exited with status 1
226 abort: prelistkeys hook exited with status 1
228 [255]
227 [255]
229 $ cd ../a
228 $ cd ../a
230
229
231 prechangegroup hook can prevent incoming changes
230 prechangegroup hook can prevent incoming changes
232
231
233 $ cd ../b
232 $ cd ../b
234 $ hg -q tip
233 $ hg -q tip
235 3:07f3376c1e65
234 3:07f3376c1e65
236 $ echo '[hooks]' > .hg/hgrc
235 $ echo '[hooks]' > .hg/hgrc
237 $ echo 'prechangegroup.forbid = python "$TESTDIR"/printenv.py prechangegroup.forbid 1' >> .hg/hgrc
236 $ echo 'prechangegroup.forbid = python "$TESTDIR"/printenv.py prechangegroup.forbid 1' >> .hg/hgrc
238 $ hg pull ../a
237 $ hg pull ../a
239 pulling from ../a
238 pulling from ../a
240 searching for changes
239 searching for changes
241 prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
240 prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
242 abort: prechangegroup.forbid hook exited with status 1
241 abort: prechangegroup.forbid hook exited with status 1
243 [255]
242 [255]
244
243
245 pretxnchangegroup hook can see incoming changes, can roll back txn,
244 pretxnchangegroup hook can see incoming changes, can roll back txn,
246 incoming changes no longer there after
245 incoming changes no longer there after
247
246
248 $ echo '[hooks]' > .hg/hgrc
247 $ echo '[hooks]' > .hg/hgrc
249 $ echo 'pretxnchangegroup.forbid0 = hg tip -q' >> .hg/hgrc
248 $ echo 'pretxnchangegroup.forbid0 = hg tip -q' >> .hg/hgrc
250 $ echo 'pretxnchangegroup.forbid1 = python "$TESTDIR"/printenv.py pretxnchangegroup.forbid 1' >> .hg/hgrc
249 $ echo 'pretxnchangegroup.forbid1 = python "$TESTDIR"/printenv.py pretxnchangegroup.forbid 1' >> .hg/hgrc
251 $ hg pull ../a
250 $ hg pull ../a
252 pulling from ../a
251 pulling from ../a
253 searching for changes
252 searching for changes
254 adding changesets
253 adding changesets
255 adding manifests
254 adding manifests
256 adding file changes
255 adding file changes
257 added 1 changesets with 1 changes to 1 files
256 added 1 changesets with 1 changes to 1 files
258 4:539e4b31b6dc
257 4:539e4b31b6dc
259 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
258 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
260 transaction abort!
259 transaction abort!
261 rollback completed
260 rollback completed
262 abort: pretxnchangegroup.forbid1 hook exited with status 1
261 abort: pretxnchangegroup.forbid1 hook exited with status 1
263 [255]
262 [255]
264 $ hg -q tip
263 $ hg -q tip
265 3:07f3376c1e65
264 3:07f3376c1e65
266
265
267 outgoing hooks can see env vars
266 outgoing hooks can see env vars
268
267
269 $ rm .hg/hgrc
268 $ rm .hg/hgrc
270 $ echo '[hooks]' > ../a/.hg/hgrc
269 $ echo '[hooks]' > ../a/.hg/hgrc
271 $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> ../a/.hg/hgrc
270 $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> ../a/.hg/hgrc
272 $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> ../a/.hg/hgrc
271 $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> ../a/.hg/hgrc
273 $ hg pull ../a
272 $ hg pull ../a
274 pulling from ../a
273 pulling from ../a
275 searching for changes
274 searching for changes
276 preoutgoing hook: HG_SOURCE=pull
275 preoutgoing hook: HG_SOURCE=pull
277 adding changesets
276 adding changesets
278 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
277 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
279 adding manifests
278 adding manifests
280 adding file changes
279 adding file changes
281 added 1 changesets with 1 changes to 1 files
280 added 1 changesets with 1 changes to 1 files
282 (run 'hg update' to get a working copy)
281 (run 'hg update' to get a working copy)
283 $ hg rollback
282 $ hg rollback
284 repository tip rolled back to revision 3 (undo pull)
283 repository tip rolled back to revision 3 (undo pull)
285
284
286 preoutgoing hook can prevent outgoing changes
285 preoutgoing hook can prevent outgoing changes
287
286
288 $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> ../a/.hg/hgrc
287 $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> ../a/.hg/hgrc
289 $ hg pull ../a
288 $ hg pull ../a
290 pulling from ../a
289 pulling from ../a
291 searching for changes
290 searching for changes
292 preoutgoing hook: HG_SOURCE=pull
291 preoutgoing hook: HG_SOURCE=pull
293 preoutgoing.forbid hook: HG_SOURCE=pull
292 preoutgoing.forbid hook: HG_SOURCE=pull
294 abort: preoutgoing.forbid hook exited with status 1
293 abort: preoutgoing.forbid hook exited with status 1
295 [255]
294 [255]
296
295
297 outgoing hooks work for local clones
296 outgoing hooks work for local clones
298
297
299 $ cd ..
298 $ cd ..
300 $ echo '[hooks]' > a/.hg/hgrc
299 $ echo '[hooks]' > a/.hg/hgrc
301 $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> a/.hg/hgrc
300 $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> a/.hg/hgrc
302 $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> a/.hg/hgrc
301 $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> a/.hg/hgrc
303 $ hg clone a c
302 $ hg clone a c
304 preoutgoing hook: HG_SOURCE=clone
303 preoutgoing hook: HG_SOURCE=clone
305 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
304 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
306 updating to branch default
305 updating to branch default
307 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
306 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
308 $ rm -rf c
307 $ rm -rf c
309
308
310 preoutgoing hook can prevent outgoing changes for local clones
309 preoutgoing hook can prevent outgoing changes for local clones
311
310
312 $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> a/.hg/hgrc
311 $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> a/.hg/hgrc
313 $ hg clone a zzz
312 $ hg clone a zzz
314 preoutgoing hook: HG_SOURCE=clone
313 preoutgoing hook: HG_SOURCE=clone
315 preoutgoing.forbid hook: HG_SOURCE=clone
314 preoutgoing.forbid hook: HG_SOURCE=clone
316 abort: preoutgoing.forbid hook exited with status 1
315 abort: preoutgoing.forbid hook exited with status 1
317 [255]
316 [255]
318 $ cd b
317 $ cd b
319
318
320 $ cat > hooktests.py <<EOF
319 $ cat > hooktests.py <<EOF
321 > from mercurial import util
320 > from mercurial import util
322 >
321 >
323 > uncallable = 0
322 > uncallable = 0
324 >
323 >
325 > def printargs(args):
324 > def printargs(args):
326 > args.pop('ui', None)
325 > args.pop('ui', None)
327 > args.pop('repo', None)
326 > args.pop('repo', None)
328 > a = list(args.items())
327 > a = list(args.items())
329 > a.sort()
328 > a.sort()
330 > print 'hook args:'
329 > print 'hook args:'
331 > for k, v in a:
330 > for k, v in a:
332 > print ' ', k, v
331 > print ' ', k, v
333 >
332 >
334 > def passhook(**args):
333 > def passhook(**args):
335 > printargs(args)
334 > printargs(args)
336 >
335 >
337 > def failhook(**args):
336 > def failhook(**args):
338 > printargs(args)
337 > printargs(args)
339 > return True
338 > return True
340 >
339 >
341 > class LocalException(Exception):
340 > class LocalException(Exception):
342 > pass
341 > pass
343 >
342 >
344 > def raisehook(**args):
343 > def raisehook(**args):
345 > raise LocalException('exception from hook')
344 > raise LocalException('exception from hook')
346 >
345 >
347 > def aborthook(**args):
346 > def aborthook(**args):
348 > raise util.Abort('raise abort from hook')
347 > raise util.Abort('raise abort from hook')
349 >
348 >
350 > def brokenhook(**args):
349 > def brokenhook(**args):
351 > return 1 + {}
350 > return 1 + {}
352 >
351 >
353 > def verbosehook(ui, **args):
352 > def verbosehook(ui, **args):
354 > ui.note('verbose output from hook\n')
353 > ui.note('verbose output from hook\n')
355 >
354 >
356 > def printtags(ui, repo, **args):
355 > def printtags(ui, repo, **args):
357 > print repo.tags().keys()
356 > print repo.tags().keys()
358 >
357 >
359 > class container:
358 > class container:
360 > unreachable = 1
359 > unreachable = 1
361 > EOF
360 > EOF
362
361
363 test python hooks
362 test python hooks
364
363
365 $ PYTHONPATH="`pwd`:$PYTHONPATH"
364 $ PYTHONPATH="`pwd`:$PYTHONPATH"
366 $ export PYTHONPATH
365 $ export PYTHONPATH
367
366
368 $ echo '[hooks]' > ../a/.hg/hgrc
367 $ echo '[hooks]' > ../a/.hg/hgrc
369 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
368 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
370 $ hg pull ../a 2>&1 | grep 'raised an exception'
369 $ hg pull ../a 2>&1 | grep 'raised an exception'
371 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
370 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
372
371
373 $ echo '[hooks]' > ../a/.hg/hgrc
372 $ echo '[hooks]' > ../a/.hg/hgrc
374 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
373 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
375 $ hg pull ../a 2>&1 | grep 'raised an exception'
374 $ hg pull ../a 2>&1 | grep 'raised an exception'
376 error: preoutgoing.raise hook raised an exception: exception from hook
375 error: preoutgoing.raise hook raised an exception: exception from hook
377
376
378 $ echo '[hooks]' > ../a/.hg/hgrc
377 $ echo '[hooks]' > ../a/.hg/hgrc
379 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
378 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
380 $ hg pull ../a
379 $ hg pull ../a
381 pulling from ../a
380 pulling from ../a
382 searching for changes
381 searching for changes
383 error: preoutgoing.abort hook failed: raise abort from hook
382 error: preoutgoing.abort hook failed: raise abort from hook
384 abort: raise abort from hook
383 abort: raise abort from hook
385 [255]
384 [255]
386
385
387 $ echo '[hooks]' > ../a/.hg/hgrc
386 $ echo '[hooks]' > ../a/.hg/hgrc
388 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
387 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
389 $ hg pull ../a
388 $ hg pull ../a
390 pulling from ../a
389 pulling from ../a
391 searching for changes
390 searching for changes
392 hook args:
391 hook args:
393 hooktype preoutgoing
392 hooktype preoutgoing
394 source pull
393 source pull
395 abort: preoutgoing.fail hook failed
394 abort: preoutgoing.fail hook failed
396 [255]
395 [255]
397
396
398 $ echo '[hooks]' > ../a/.hg/hgrc
397 $ echo '[hooks]' > ../a/.hg/hgrc
399 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
398 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
400 $ hg pull ../a
399 $ hg pull ../a
401 pulling from ../a
400 pulling from ../a
402 searching for changes
401 searching for changes
403 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
402 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
404 [255]
403 [255]
405
404
406 $ echo '[hooks]' > ../a/.hg/hgrc
405 $ echo '[hooks]' > ../a/.hg/hgrc
407 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
406 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
408 $ hg pull ../a
407 $ hg pull ../a
409 pulling from ../a
408 pulling from ../a
410 searching for changes
409 searching for changes
411 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
410 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
412 [255]
411 [255]
413
412
414 $ echo '[hooks]' > ../a/.hg/hgrc
413 $ echo '[hooks]' > ../a/.hg/hgrc
415 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
414 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
416 $ hg pull ../a
415 $ hg pull ../a
417 pulling from ../a
416 pulling from ../a
418 searching for changes
417 searching for changes
419 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
418 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
420 [255]
419 [255]
421
420
422 $ echo '[hooks]' > ../a/.hg/hgrc
421 $ echo '[hooks]' > ../a/.hg/hgrc
423 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
422 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
424 $ hg pull ../a
423 $ hg pull ../a
425 pulling from ../a
424 pulling from ../a
426 searching for changes
425 searching for changes
427 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
426 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
428 [255]
427 [255]
429
428
430 $ echo '[hooks]' > ../a/.hg/hgrc
429 $ echo '[hooks]' > ../a/.hg/hgrc
431 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
430 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
432 $ hg pull ../a
431 $ hg pull ../a
433 pulling from ../a
432 pulling from ../a
434 searching for changes
433 searching for changes
435 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
434 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
436 [255]
435 [255]
437
436
438 $ echo '[hooks]' > ../a/.hg/hgrc
437 $ echo '[hooks]' > ../a/.hg/hgrc
439 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
438 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
440 $ hg pull ../a
439 $ hg pull ../a
441 pulling from ../a
440 pulling from ../a
442 searching for changes
441 searching for changes
443 hook args:
442 hook args:
444 hooktype preoutgoing
443 hooktype preoutgoing
445 source pull
444 source pull
446 adding changesets
445 adding changesets
447 adding manifests
446 adding manifests
448 adding file changes
447 adding file changes
449 added 1 changesets with 1 changes to 1 files
448 added 1 changesets with 1 changes to 1 files
450 (run 'hg update' to get a working copy)
449 (run 'hg update' to get a working copy)
451
450
452 make sure --traceback works
451 make sure --traceback works
453
452
454 $ echo '[hooks]' > .hg/hgrc
453 $ echo '[hooks]' > .hg/hgrc
455 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
454 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
456
455
457 $ echo aa > a
456 $ echo aa > a
458 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
457 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
459 Traceback (most recent call last):
458 Traceback (most recent call last):
460
459
461 $ cd ..
460 $ cd ..
462 $ hg init c
461 $ hg init c
463 $ cd c
462 $ cd c
464
463
465 $ cat > hookext.py <<EOF
464 $ cat > hookext.py <<EOF
466 > def autohook(**args):
465 > def autohook(**args):
467 > print "Automatically installed hook"
466 > print "Automatically installed hook"
468 >
467 >
469 > def reposetup(ui, repo):
468 > def reposetup(ui, repo):
470 > repo.ui.setconfig("hooks", "commit.auto", autohook)
469 > repo.ui.setconfig("hooks", "commit.auto", autohook)
471 > EOF
470 > EOF
472 $ echo '[extensions]' >> .hg/hgrc
471 $ echo '[extensions]' >> .hg/hgrc
473 $ echo 'hookext = hookext.py' >> .hg/hgrc
472 $ echo 'hookext = hookext.py' >> .hg/hgrc
474
473
475 $ touch foo
474 $ touch foo
476 $ hg add foo
475 $ hg add foo
477 $ hg ci -d '0 0' -m 'add foo'
476 $ hg ci -d '0 0' -m 'add foo'
478 Automatically installed hook
477 Automatically installed hook
479 $ echo >> foo
478 $ echo >> foo
480 $ hg ci --debug -d '0 0' -m 'change foo'
479 $ hg ci --debug -d '0 0' -m 'change foo'
481 foo
480 foo
482 calling hook commit.auto: <function autohook at *> (glob)
481 calling hook commit.auto: <function autohook at *> (glob)
483 Automatically installed hook
482 Automatically installed hook
484 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
483 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
485
484
486 $ hg showconfig hooks
485 $ hg showconfig hooks
487 hooks.commit.auto=<function autohook at *> (glob)
486 hooks.commit.auto=<function autohook at *> (glob)
488
487
489 test python hook configured with python:[file]:[hook] syntax
488 test python hook configured with python:[file]:[hook] syntax
490
489
491 $ cd ..
490 $ cd ..
492 $ mkdir d
491 $ mkdir d
493 $ cd d
492 $ cd d
494 $ hg init repo
493 $ hg init repo
495 $ mkdir hooks
494 $ mkdir hooks
496
495
497 $ cd hooks
496 $ cd hooks
498 $ cat > testhooks.py <<EOF
497 $ cat > testhooks.py <<EOF
499 > def testhook(**args):
498 > def testhook(**args):
500 > print 'hook works'
499 > print 'hook works'
501 > EOF
500 > EOF
502 $ echo '[hooks]' > ../repo/.hg/hgrc
501 $ echo '[hooks]' > ../repo/.hg/hgrc
503 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
502 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
504
503
505 $ cd ../repo
504 $ cd ../repo
506 $ hg commit -d '0 0'
505 $ hg commit -d '0 0'
507 hook works
506 hook works
508 nothing changed
507 nothing changed
509 [1]
508 [1]
510
509
511 $ cd ../../b
510 $ cd ../../b
512
511
513 make sure --traceback works on hook import failure
512 make sure --traceback works on hook import failure
514
513
515 $ cat > importfail.py <<EOF
514 $ cat > importfail.py <<EOF
516 > import somebogusmodule
515 > import somebogusmodule
517 > # dereference something in the module to force demandimport to load it
516 > # dereference something in the module to force demandimport to load it
518 > somebogusmodule.whatever
517 > somebogusmodule.whatever
519 > EOF
518 > EOF
520
519
521 $ echo '[hooks]' > .hg/hgrc
520 $ echo '[hooks]' > .hg/hgrc
522 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
521 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
523
522
524 $ echo a >> a
523 $ echo a >> a
525 $ hg --traceback commit -ma 2>&1 | egrep '^(exception|Traceback|ImportError)'
524 $ hg --traceback commit -ma 2>&1 | egrep '^(exception|Traceback|ImportError)'
526 exception from first failed import attempt:
525 exception from first failed import attempt:
527 Traceback (most recent call last):
526 Traceback (most recent call last):
528 ImportError: No module named somebogusmodule
527 ImportError: No module named somebogusmodule
529 exception from second failed import attempt:
528 exception from second failed import attempt:
530 Traceback (most recent call last):
529 Traceback (most recent call last):
531 ImportError: No module named hgext_importfail
530 ImportError: No module named hgext_importfail
532 Traceback (most recent call last):
531 Traceback (most recent call last):
533
532
534 Issue1827: Hooks Update & Commit not completely post operation
533 Issue1827: Hooks Update & Commit not completely post operation
535
534
536 commit and update hooks should run after command completion
535 commit and update hooks should run after command completion
537
536
538 $ echo '[hooks]' > .hg/hgrc
537 $ echo '[hooks]' > .hg/hgrc
539 $ echo 'commit = hg id' >> .hg/hgrc
538 $ echo 'commit = hg id' >> .hg/hgrc
540 $ echo 'update = hg id' >> .hg/hgrc
539 $ echo 'update = hg id' >> .hg/hgrc
541 $ echo bb > a
540 $ echo bb > a
542 $ hg ci -ma
541 $ hg ci -ma
543 223eafe2750c tip
542 223eafe2750c tip
544 $ hg up 0
543 $ hg up 0
545 cb9a9f314b8b
544 cb9a9f314b8b
546 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
545 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
547
546
548 make sure --verbose (and --quiet/--debug etc.) are propogated to the local ui
547 make sure --verbose (and --quiet/--debug etc.) are propogated to the local ui
549 that is passed to pre/post hooks
548 that is passed to pre/post hooks
550
549
551 $ echo '[hooks]' > .hg/hgrc
550 $ echo '[hooks]' > .hg/hgrc
552 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
551 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
553 $ hg id
552 $ hg id
554 cb9a9f314b8b
553 cb9a9f314b8b
555 $ hg id --verbose
554 $ hg id --verbose
556 calling hook pre-identify: hooktests.verbosehook
555 calling hook pre-identify: hooktests.verbosehook
557 verbose output from hook
556 verbose output from hook
558 cb9a9f314b8b
557 cb9a9f314b8b
559
558
560 Ensure hooks can be prioritized
559 Ensure hooks can be prioritized
561
560
562 $ echo '[hooks]' > .hg/hgrc
561 $ echo '[hooks]' > .hg/hgrc
563 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
562 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
564 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
563 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
565 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
564 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
566 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
565 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
567 $ hg id --verbose
566 $ hg id --verbose
568 calling hook pre-identify.b: hooktests.verbosehook
567 calling hook pre-identify.b: hooktests.verbosehook
569 verbose output from hook
568 verbose output from hook
570 calling hook pre-identify.a: hooktests.verbosehook
569 calling hook pre-identify.a: hooktests.verbosehook
571 verbose output from hook
570 verbose output from hook
572 calling hook pre-identify.c: hooktests.verbosehook
571 calling hook pre-identify.c: hooktests.verbosehook
573 verbose output from hook
572 verbose output from hook
574 cb9a9f314b8b
573 cb9a9f314b8b
575
574
576 new tags must be visible in pretxncommit (issue3210)
575 new tags must be visible in pretxncommit (issue3210)
577
576
578 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
577 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
579 $ hg tag -f foo
578 $ hg tag -f foo
580 ['a', 'foo', 'tip']
579 ['a', 'foo', 'tip']
581
580
@@ -1,1016 +1,1018 b''
1 $ cat >> $HGRCPATH <<EOF
1 $ cat >> $HGRCPATH <<EOF
2 > [extensions]
2 > [extensions]
3 > graphlog=
3 > graphlog=
4 > EOF
4 > EOF
5 $ alias hgph='hg log -G --template "{rev} {phase} {desc} - {node|short}\n"'
5 $ alias hgph='hg log -G --template "{rev} {phase} {desc} - {node|short}\n"'
6
6
7 $ mkcommit() {
7 $ mkcommit() {
8 > echo "$1" > "$1"
8 > echo "$1" > "$1"
9 > hg add "$1"
9 > hg add "$1"
10 > message="$1"
10 > message="$1"
11 > shift
11 > shift
12 > hg ci -m "$message" $*
12 > hg ci -m "$message" $*
13 > }
13 > }
14
14
15 $ hg init alpha
15 $ hg init alpha
16 $ cd alpha
16 $ cd alpha
17 $ mkcommit a-A
17 $ mkcommit a-A
18 $ mkcommit a-B
18 $ mkcommit a-B
19 $ mkcommit a-C
19 $ mkcommit a-C
20 $ mkcommit a-D
20 $ mkcommit a-D
21 $ hgph
21 $ hgph
22 @ 3 draft a-D - b555f63b6063
22 @ 3 draft a-D - b555f63b6063
23 |
23 |
24 o 2 draft a-C - 54acac6f23ab
24 o 2 draft a-C - 54acac6f23ab
25 |
25 |
26 o 1 draft a-B - 548a3d25dbf0
26 o 1 draft a-B - 548a3d25dbf0
27 |
27 |
28 o 0 draft a-A - 054250a37db4
28 o 0 draft a-A - 054250a37db4
29
29
30
30
31 $ hg init ../beta
31 $ hg init ../beta
32 $ hg push -r 1 ../beta
32 $ hg push -r 1 ../beta
33 pushing to ../beta
33 pushing to ../beta
34 searching for changes
34 searching for changes
35 adding changesets
35 adding changesets
36 adding manifests
36 adding manifests
37 adding file changes
37 adding file changes
38 added 2 changesets with 2 changes to 2 files
38 added 2 changesets with 2 changes to 2 files
39 $ hgph
39 $ hgph
40 @ 3 draft a-D - b555f63b6063
40 @ 3 draft a-D - b555f63b6063
41 |
41 |
42 o 2 draft a-C - 54acac6f23ab
42 o 2 draft a-C - 54acac6f23ab
43 |
43 |
44 o 1 public a-B - 548a3d25dbf0
44 o 1 public a-B - 548a3d25dbf0
45 |
45 |
46 o 0 public a-A - 054250a37db4
46 o 0 public a-A - 054250a37db4
47
47
48
48
49 $ cd ../beta
49 $ cd ../beta
50 $ hgph
50 $ hgph
51 o 1 public a-B - 548a3d25dbf0
51 o 1 public a-B - 548a3d25dbf0
52 |
52 |
53 o 0 public a-A - 054250a37db4
53 o 0 public a-A - 054250a37db4
54
54
55 $ hg up -q
55 $ hg up -q
56 $ mkcommit b-A
56 $ mkcommit b-A
57 $ hgph
57 $ hgph
58 @ 2 draft b-A - f54f1bb90ff3
58 @ 2 draft b-A - f54f1bb90ff3
59 |
59 |
60 o 1 public a-B - 548a3d25dbf0
60 o 1 public a-B - 548a3d25dbf0
61 |
61 |
62 o 0 public a-A - 054250a37db4
62 o 0 public a-A - 054250a37db4
63
63
64 $ hg pull ../alpha
64 $ hg pull ../alpha
65 pulling from ../alpha
65 pulling from ../alpha
66 searching for changes
66 searching for changes
67 adding changesets
67 adding changesets
68 adding manifests
68 adding manifests
69 adding file changes
69 adding file changes
70 added 2 changesets with 2 changes to 2 files (+1 heads)
70 added 2 changesets with 2 changes to 2 files (+1 heads)
71 (run 'hg heads' to see heads, 'hg merge' to merge)
71 (run 'hg heads' to see heads, 'hg merge' to merge)
72 $ hgph
72 $ hgph
73 o 4 public a-D - b555f63b6063
73 o 4 public a-D - b555f63b6063
74 |
74 |
75 o 3 public a-C - 54acac6f23ab
75 o 3 public a-C - 54acac6f23ab
76 |
76 |
77 | @ 2 draft b-A - f54f1bb90ff3
77 | @ 2 draft b-A - f54f1bb90ff3
78 |/
78 |/
79 o 1 public a-B - 548a3d25dbf0
79 o 1 public a-B - 548a3d25dbf0
80 |
80 |
81 o 0 public a-A - 054250a37db4
81 o 0 public a-A - 054250a37db4
82
82
83
83
84 pull did not updated ../alpha state.
84 pull did not updated ../alpha state.
85 push from alpha to beta should update phase even if nothing is transfered
85 push from alpha to beta should update phase even if nothing is transfered
86
86
87 $ cd ../alpha
87 $ cd ../alpha
88 $ hgph # not updated by remote pull
88 $ hgph # not updated by remote pull
89 @ 3 draft a-D - b555f63b6063
89 @ 3 draft a-D - b555f63b6063
90 |
90 |
91 o 2 draft a-C - 54acac6f23ab
91 o 2 draft a-C - 54acac6f23ab
92 |
92 |
93 o 1 public a-B - 548a3d25dbf0
93 o 1 public a-B - 548a3d25dbf0
94 |
94 |
95 o 0 public a-A - 054250a37db4
95 o 0 public a-A - 054250a37db4
96
96
97 $ hg push ../beta
97 $ hg push ../beta
98 pushing to ../beta
98 pushing to ../beta
99 searching for changes
99 searching for changes
100 no changes found
100 no changes found
101 $ hgph
101 $ hgph
102 @ 3 public a-D - b555f63b6063
102 @ 3 public a-D - b555f63b6063
103 |
103 |
104 o 2 public a-C - 54acac6f23ab
104 o 2 public a-C - 54acac6f23ab
105 |
105 |
106 o 1 public a-B - 548a3d25dbf0
106 o 1 public a-B - 548a3d25dbf0
107 |
107 |
108 o 0 public a-A - 054250a37db4
108 o 0 public a-A - 054250a37db4
109
109
110
110
111 update must update phase of common changeset too
111 update must update phase of common changeset too
112
112
113 $ hg pull ../beta # getting b-A
113 $ hg pull ../beta # getting b-A
114 pulling from ../beta
114 pulling from ../beta
115 searching for changes
115 searching for changes
116 adding changesets
116 adding changesets
117 adding manifests
117 adding manifests
118 adding file changes
118 adding file changes
119 added 1 changesets with 1 changes to 1 files (+1 heads)
119 added 1 changesets with 1 changes to 1 files (+1 heads)
120 (run 'hg heads' to see heads, 'hg merge' to merge)
120 (run 'hg heads' to see heads, 'hg merge' to merge)
121
121
122 $ cd ../beta
122 $ cd ../beta
123 $ hgph # not updated by remote pull
123 $ hgph # not updated by remote pull
124 o 4 public a-D - b555f63b6063
124 o 4 public a-D - b555f63b6063
125 |
125 |
126 o 3 public a-C - 54acac6f23ab
126 o 3 public a-C - 54acac6f23ab
127 |
127 |
128 | @ 2 draft b-A - f54f1bb90ff3
128 | @ 2 draft b-A - f54f1bb90ff3
129 |/
129 |/
130 o 1 public a-B - 548a3d25dbf0
130 o 1 public a-B - 548a3d25dbf0
131 |
131 |
132 o 0 public a-A - 054250a37db4
132 o 0 public a-A - 054250a37db4
133
133
134 $ hg pull ../alpha
134 $ hg pull ../alpha
135 pulling from ../alpha
135 pulling from ../alpha
136 searching for changes
136 searching for changes
137 no changes found
137 no changes found
138 $ hgph
138 $ hgph
139 o 4 public a-D - b555f63b6063
139 o 4 public a-D - b555f63b6063
140 |
140 |
141 o 3 public a-C - 54acac6f23ab
141 o 3 public a-C - 54acac6f23ab
142 |
142 |
143 | @ 2 public b-A - f54f1bb90ff3
143 | @ 2 public b-A - f54f1bb90ff3
144 |/
144 |/
145 o 1 public a-B - 548a3d25dbf0
145 o 1 public a-B - 548a3d25dbf0
146 |
146 |
147 o 0 public a-A - 054250a37db4
147 o 0 public a-A - 054250a37db4
148
148
149
149
150 Publish configuration option
150 Publish configuration option
151 ----------------------------
151 ----------------------------
152
152
153 Pull
153 Pull
154 ````
154 ````
155
155
156 changegroup are added without phase movement
156 changegroup are added without phase movement
157
157
158 $ hg bundle -a ../base.bundle
158 $ hg bundle -a ../base.bundle
159 5 changesets found
159 5 changesets found
160 $ cd ..
160 $ cd ..
161 $ hg init mu
161 $ hg init mu
162 $ cd mu
162 $ cd mu
163 $ cat > .hg/hgrc << EOF
163 $ cat > .hg/hgrc << EOF
164 > [phases]
164 > [phases]
165 > publish=0
165 > publish=0
166 > EOF
166 > EOF
167 $ hg unbundle ../base.bundle
167 $ hg unbundle ../base.bundle
168 adding changesets
168 adding changesets
169 adding manifests
169 adding manifests
170 adding file changes
170 adding file changes
171 added 5 changesets with 5 changes to 5 files (+1 heads)
171 added 5 changesets with 5 changes to 5 files (+1 heads)
172 (run 'hg heads' to see heads, 'hg merge' to merge)
172 (run 'hg heads' to see heads, 'hg merge' to merge)
173 $ hgph
173 $ hgph
174 o 4 draft a-D - b555f63b6063
174 o 4 draft a-D - b555f63b6063
175 |
175 |
176 o 3 draft a-C - 54acac6f23ab
176 o 3 draft a-C - 54acac6f23ab
177 |
177 |
178 | o 2 draft b-A - f54f1bb90ff3
178 | o 2 draft b-A - f54f1bb90ff3
179 |/
179 |/
180 o 1 draft a-B - 548a3d25dbf0
180 o 1 draft a-B - 548a3d25dbf0
181 |
181 |
182 o 0 draft a-A - 054250a37db4
182 o 0 draft a-A - 054250a37db4
183
183
184 $ cd ..
184 $ cd ..
185
185
186 Pulling from publish=False to publish=False does not move boundary.
186 Pulling from publish=False to publish=False does not move boundary.
187
187
188 $ hg init nu
188 $ hg init nu
189 $ cd nu
189 $ cd nu
190 $ cat > .hg/hgrc << EOF
190 $ cat > .hg/hgrc << EOF
191 > [phases]
191 > [phases]
192 > publish=0
192 > publish=0
193 > EOF
193 > EOF
194 $ hg pull ../mu -r 54acac6f23ab
194 $ hg pull ../mu -r 54acac6f23ab
195 pulling from ../mu
195 pulling from ../mu
196 adding changesets
196 adding changesets
197 adding manifests
197 adding manifests
198 adding file changes
198 adding file changes
199 added 3 changesets with 3 changes to 3 files
199 added 3 changesets with 3 changes to 3 files
200 (run 'hg update' to get a working copy)
200 (run 'hg update' to get a working copy)
201 $ hgph
201 $ hgph
202 o 2 draft a-C - 54acac6f23ab
202 o 2 draft a-C - 54acac6f23ab
203 |
203 |
204 o 1 draft a-B - 548a3d25dbf0
204 o 1 draft a-B - 548a3d25dbf0
205 |
205 |
206 o 0 draft a-A - 054250a37db4
206 o 0 draft a-A - 054250a37db4
207
207
208
208
209 Even for common
209 Even for common
210
210
211 $ hg pull ../mu -r f54f1bb90ff3
211 $ hg pull ../mu -r f54f1bb90ff3
212 pulling from ../mu
212 pulling from ../mu
213 searching for changes
213 searching for changes
214 adding changesets
214 adding changesets
215 adding manifests
215 adding manifests
216 adding file changes
216 adding file changes
217 added 1 changesets with 1 changes to 1 files (+1 heads)
217 added 1 changesets with 1 changes to 1 files (+1 heads)
218 (run 'hg heads' to see heads, 'hg merge' to merge)
218 (run 'hg heads' to see heads, 'hg merge' to merge)
219 $ hgph
219 $ hgph
220 o 3 draft b-A - f54f1bb90ff3
220 o 3 draft b-A - f54f1bb90ff3
221 |
221 |
222 | o 2 draft a-C - 54acac6f23ab
222 | o 2 draft a-C - 54acac6f23ab
223 |/
223 |/
224 o 1 draft a-B - 548a3d25dbf0
224 o 1 draft a-B - 548a3d25dbf0
225 |
225 |
226 o 0 draft a-A - 054250a37db4
226 o 0 draft a-A - 054250a37db4
227
227
228
228
229
229
230 Pulling from Publish=True to Publish=False move boundary in common set.
230 Pulling from Publish=True to Publish=False move boundary in common set.
231 we are in nu
231 we are in nu
232
232
233 $ hg pull ../alpha -r b555f63b6063
233 $ hg pull ../alpha -r b555f63b6063
234 pulling from ../alpha
234 pulling from ../alpha
235 searching for changes
235 searching for changes
236 adding changesets
236 adding changesets
237 adding manifests
237 adding manifests
238 adding file changes
238 adding file changes
239 added 1 changesets with 1 changes to 1 files
239 added 1 changesets with 1 changes to 1 files
240 (run 'hg update' to get a working copy)
240 (run 'hg update' to get a working copy)
241 $ hgph
241 $ hgph # f54f1bb90ff3 stay draft, not ancestor of -r
242 o 4 public a-D - b555f63b6063
242 o 4 public a-D - b555f63b6063
243 |
243 |
244 | o 3 public b-A - f54f1bb90ff3
244 | o 3 draft b-A - f54f1bb90ff3
245 | |
245 | |
246 o | 2 public a-C - 54acac6f23ab
246 o | 2 public a-C - 54acac6f23ab
247 |/
247 |/
248 o 1 public a-B - 548a3d25dbf0
248 o 1 public a-B - 548a3d25dbf0
249 |
249 |
250 o 0 public a-A - 054250a37db4
250 o 0 public a-A - 054250a37db4
251
251
252
252
253 pulling from Publish=False to publish=False with some public
253 pulling from Publish=False to publish=False with some public
254
254
255 $ hg up -q f54f1bb90ff3
255 $ hg up -q f54f1bb90ff3
256 $ mkcommit n-A
256 $ mkcommit n-A
257 $ mkcommit n-B
257 $ mkcommit n-B
258 $ hgph
258 $ hgph
259 @ 6 draft n-B - 145e75495359
259 @ 6 draft n-B - 145e75495359
260 |
260 |
261 o 5 draft n-A - d6bcb4f74035
261 o 5 draft n-A - d6bcb4f74035
262 |
262 |
263 | o 4 public a-D - b555f63b6063
263 | o 4 public a-D - b555f63b6063
264 | |
264 | |
265 o | 3 public b-A - f54f1bb90ff3
265 o | 3 draft b-A - f54f1bb90ff3
266 | |
266 | |
267 | o 2 public a-C - 54acac6f23ab
267 | o 2 public a-C - 54acac6f23ab
268 |/
268 |/
269 o 1 public a-B - 548a3d25dbf0
269 o 1 public a-B - 548a3d25dbf0
270 |
270 |
271 o 0 public a-A - 054250a37db4
271 o 0 public a-A - 054250a37db4
272
272
273 $ cd ../mu
273 $ cd ../mu
274 $ hg pull ../nu
274 $ hg pull ../nu
275 pulling from ../nu
275 pulling from ../nu
276 searching for changes
276 searching for changes
277 adding changesets
277 adding changesets
278 adding manifests
278 adding manifests
279 adding file changes
279 adding file changes
280 added 2 changesets with 2 changes to 2 files
280 added 2 changesets with 2 changes to 2 files
281 (run 'hg update' to get a working copy)
281 (run 'hg update' to get a working copy)
282 $ hgph
282 $ hgph
283 o 6 draft n-B - 145e75495359
283 o 6 draft n-B - 145e75495359
284 |
284 |
285 o 5 draft n-A - d6bcb4f74035
285 o 5 draft n-A - d6bcb4f74035
286 |
286 |
287 | o 4 public a-D - b555f63b6063
287 | o 4 public a-D - b555f63b6063
288 | |
288 | |
289 | o 3 public a-C - 54acac6f23ab
289 | o 3 public a-C - 54acac6f23ab
290 | |
290 | |
291 o | 2 public b-A - f54f1bb90ff3
291 o | 2 draft b-A - f54f1bb90ff3
292 |/
292 |/
293 o 1 public a-B - 548a3d25dbf0
293 o 1 public a-B - 548a3d25dbf0
294 |
294 |
295 o 0 public a-A - 054250a37db4
295 o 0 public a-A - 054250a37db4
296
296
297 $ cd ..
297 $ cd ..
298
298
299 pulling into publish=True
299 pulling into publish=True
300
300
301 $ cd alpha
301 $ cd alpha
302 $ hgph
302 $ hgph
303 o 4 public b-A - f54f1bb90ff3
303 o 4 public b-A - f54f1bb90ff3
304 |
304 |
305 | @ 3 public a-D - b555f63b6063
305 | @ 3 public a-D - b555f63b6063
306 | |
306 | |
307 | o 2 public a-C - 54acac6f23ab
307 | o 2 public a-C - 54acac6f23ab
308 |/
308 |/
309 o 1 public a-B - 548a3d25dbf0
309 o 1 public a-B - 548a3d25dbf0
310 |
310 |
311 o 0 public a-A - 054250a37db4
311 o 0 public a-A - 054250a37db4
312
312
313 $ hg pull ../mu
313 $ hg pull ../mu
314 pulling from ../mu
314 pulling from ../mu
315 searching for changes
315 searching for changes
316 adding changesets
316 adding changesets
317 adding manifests
317 adding manifests
318 adding file changes
318 adding file changes
319 added 2 changesets with 2 changes to 2 files
319 added 2 changesets with 2 changes to 2 files
320 (run 'hg update' to get a working copy)
320 (run 'hg update' to get a working copy)
321 $ hgph
321 $ hgph
322 o 6 draft n-B - 145e75495359
322 o 6 draft n-B - 145e75495359
323 |
323 |
324 o 5 draft n-A - d6bcb4f74035
324 o 5 draft n-A - d6bcb4f74035
325 |
325 |
326 o 4 public b-A - f54f1bb90ff3
326 o 4 public b-A - f54f1bb90ff3
327 |
327 |
328 | @ 3 public a-D - b555f63b6063
328 | @ 3 public a-D - b555f63b6063
329 | |
329 | |
330 | o 2 public a-C - 54acac6f23ab
330 | o 2 public a-C - 54acac6f23ab
331 |/
331 |/
332 o 1 public a-B - 548a3d25dbf0
332 o 1 public a-B - 548a3d25dbf0
333 |
333 |
334 o 0 public a-A - 054250a37db4
334 o 0 public a-A - 054250a37db4
335
335
336 $ cd ..
336 $ cd ..
337
337
338 pulling back into original repo
338 pulling back into original repo
339
339
340 $ cd nu
340 $ cd nu
341 $ hg pull ../alpha
341 $ hg pull ../alpha
342 pulling from ../alpha
342 pulling from ../alpha
343 searching for changes
343 searching for changes
344 no changes found
344 no changes found
345 $ hgph
345 $ hgph
346 @ 6 public n-B - 145e75495359
346 @ 6 public n-B - 145e75495359
347 |
347 |
348 o 5 public n-A - d6bcb4f74035
348 o 5 public n-A - d6bcb4f74035
349 |
349 |
350 | o 4 public a-D - b555f63b6063
350 | o 4 public a-D - b555f63b6063
351 | |
351 | |
352 o | 3 public b-A - f54f1bb90ff3
352 o | 3 public b-A - f54f1bb90ff3
353 | |
353 | |
354 | o 2 public a-C - 54acac6f23ab
354 | o 2 public a-C - 54acac6f23ab
355 |/
355 |/
356 o 1 public a-B - 548a3d25dbf0
356 o 1 public a-B - 548a3d25dbf0
357 |
357 |
358 o 0 public a-A - 054250a37db4
358 o 0 public a-A - 054250a37db4
359
359
360
360
361 Push
361 Push
362 ````
362 ````
363
363
364 (inserted)
364 (inserted)
365
365
366 Test that phase are pushed even when they are nothing to pus
366 Test that phase are pushed even when they are nothing to pus
367 (this might be tested later bu are very convenient to not alter too much test)
367 (this might be tested later bu are very convenient to not alter too much test)
368
368
369 Push back to alpha
369 Push back to alpha
370
370
371 $ hg push ../alpha # from nu
371 $ hg push ../alpha # from nu
372 pushing to ../alpha
372 pushing to ../alpha
373 searching for changes
373 searching for changes
374 no changes found
374 no changes found
375 $ cd ..
375 $ cd ..
376 $ cd alpha
376 $ cd alpha
377 $ hgph
377 $ hgph
378 o 6 public n-B - 145e75495359
378 o 6 public n-B - 145e75495359
379 |
379 |
380 o 5 public n-A - d6bcb4f74035
380 o 5 public n-A - d6bcb4f74035
381 |
381 |
382 o 4 public b-A - f54f1bb90ff3
382 o 4 public b-A - f54f1bb90ff3
383 |
383 |
384 | @ 3 public a-D - b555f63b6063
384 | @ 3 public a-D - b555f63b6063
385 | |
385 | |
386 | o 2 public a-C - 54acac6f23ab
386 | o 2 public a-C - 54acac6f23ab
387 |/
387 |/
388 o 1 public a-B - 548a3d25dbf0
388 o 1 public a-B - 548a3d25dbf0
389 |
389 |
390 o 0 public a-A - 054250a37db4
390 o 0 public a-A - 054250a37db4
391
391
392
392
393 (end insertion)
393 (end insertion)
394
394
395
395
396 initial setup
396 initial setup
397
397
398 $ hg glog # of alpha
398 $ hg glog # of alpha
399 o changeset: 6:145e75495359
399 o changeset: 6:145e75495359
400 | tag: tip
400 | tag: tip
401 | user: test
401 | user: test
402 | date: Thu Jan 01 00:00:00 1970 +0000
402 | date: Thu Jan 01 00:00:00 1970 +0000
403 | summary: n-B
403 | summary: n-B
404 |
404 |
405 o changeset: 5:d6bcb4f74035
405 o changeset: 5:d6bcb4f74035
406 | user: test
406 | user: test
407 | date: Thu Jan 01 00:00:00 1970 +0000
407 | date: Thu Jan 01 00:00:00 1970 +0000
408 | summary: n-A
408 | summary: n-A
409 |
409 |
410 o changeset: 4:f54f1bb90ff3
410 o changeset: 4:f54f1bb90ff3
411 | parent: 1:548a3d25dbf0
411 | parent: 1:548a3d25dbf0
412 | user: test
412 | user: test
413 | date: Thu Jan 01 00:00:00 1970 +0000
413 | date: Thu Jan 01 00:00:00 1970 +0000
414 | summary: b-A
414 | summary: b-A
415 |
415 |
416 | @ changeset: 3:b555f63b6063
416 | @ changeset: 3:b555f63b6063
417 | | user: test
417 | | user: test
418 | | date: Thu Jan 01 00:00:00 1970 +0000
418 | | date: Thu Jan 01 00:00:00 1970 +0000
419 | | summary: a-D
419 | | summary: a-D
420 | |
420 | |
421 | o changeset: 2:54acac6f23ab
421 | o changeset: 2:54acac6f23ab
422 |/ user: test
422 |/ user: test
423 | date: Thu Jan 01 00:00:00 1970 +0000
423 | date: Thu Jan 01 00:00:00 1970 +0000
424 | summary: a-C
424 | summary: a-C
425 |
425 |
426 o changeset: 1:548a3d25dbf0
426 o changeset: 1:548a3d25dbf0
427 | user: test
427 | user: test
428 | date: Thu Jan 01 00:00:00 1970 +0000
428 | date: Thu Jan 01 00:00:00 1970 +0000
429 | summary: a-B
429 | summary: a-B
430 |
430 |
431 o changeset: 0:054250a37db4
431 o changeset: 0:054250a37db4
432 user: test
432 user: test
433 date: Thu Jan 01 00:00:00 1970 +0000
433 date: Thu Jan 01 00:00:00 1970 +0000
434 summary: a-A
434 summary: a-A
435
435
436 $ mkcommit a-E
436 $ mkcommit a-E
437 $ mkcommit a-F
437 $ mkcommit a-F
438 $ mkcommit a-G
438 $ mkcommit a-G
439 $ hg up d6bcb4f74035 -q
439 $ hg up d6bcb4f74035 -q
440 $ mkcommit a-H
440 $ mkcommit a-H
441 created new head
441 created new head
442 $ hgph
442 $ hgph
443 @ 10 draft a-H - 967b449fbc94
443 @ 10 draft a-H - 967b449fbc94
444 |
444 |
445 | o 9 draft a-G - 3e27b6f1eee1
445 | o 9 draft a-G - 3e27b6f1eee1
446 | |
446 | |
447 | o 8 draft a-F - b740e3e5c05d
447 | o 8 draft a-F - b740e3e5c05d
448 | |
448 | |
449 | o 7 draft a-E - e9f537e46dea
449 | o 7 draft a-E - e9f537e46dea
450 | |
450 | |
451 +---o 6 public n-B - 145e75495359
451 +---o 6 public n-B - 145e75495359
452 | |
452 | |
453 o | 5 public n-A - d6bcb4f74035
453 o | 5 public n-A - d6bcb4f74035
454 | |
454 | |
455 o | 4 public b-A - f54f1bb90ff3
455 o | 4 public b-A - f54f1bb90ff3
456 | |
456 | |
457 | o 3 public a-D - b555f63b6063
457 | o 3 public a-D - b555f63b6063
458 | |
458 | |
459 | o 2 public a-C - 54acac6f23ab
459 | o 2 public a-C - 54acac6f23ab
460 |/
460 |/
461 o 1 public a-B - 548a3d25dbf0
461 o 1 public a-B - 548a3d25dbf0
462 |
462 |
463 o 0 public a-A - 054250a37db4
463 o 0 public a-A - 054250a37db4
464
464
465
465
466 Pushing to Publish=False (unknown changeset)
466 Pushing to Publish=False (unknown changeset)
467
467
468 $ hg push ../mu -r b740e3e5c05d # a-F
468 $ hg push ../mu -r b740e3e5c05d # a-F
469 pushing to ../mu
469 pushing to ../mu
470 searching for changes
470 searching for changes
471 adding changesets
471 adding changesets
472 adding manifests
472 adding manifests
473 adding file changes
473 adding file changes
474 added 2 changesets with 2 changes to 2 files
474 added 2 changesets with 2 changes to 2 files
475 $ hgph
475 $ hgph
476 @ 10 draft a-H - 967b449fbc94
476 @ 10 draft a-H - 967b449fbc94
477 |
477 |
478 | o 9 draft a-G - 3e27b6f1eee1
478 | o 9 draft a-G - 3e27b6f1eee1
479 | |
479 | |
480 | o 8 draft a-F - b740e3e5c05d
480 | o 8 draft a-F - b740e3e5c05d
481 | |
481 | |
482 | o 7 draft a-E - e9f537e46dea
482 | o 7 draft a-E - e9f537e46dea
483 | |
483 | |
484 +---o 6 public n-B - 145e75495359
484 +---o 6 public n-B - 145e75495359
485 | |
485 | |
486 o | 5 public n-A - d6bcb4f74035
486 o | 5 public n-A - d6bcb4f74035
487 | |
487 | |
488 o | 4 public b-A - f54f1bb90ff3
488 o | 4 public b-A - f54f1bb90ff3
489 | |
489 | |
490 | o 3 public a-D - b555f63b6063
490 | o 3 public a-D - b555f63b6063
491 | |
491 | |
492 | o 2 public a-C - 54acac6f23ab
492 | o 2 public a-C - 54acac6f23ab
493 |/
493 |/
494 o 1 public a-B - 548a3d25dbf0
494 o 1 public a-B - 548a3d25dbf0
495 |
495 |
496 o 0 public a-A - 054250a37db4
496 o 0 public a-A - 054250a37db4
497
497
498
498
499 $ cd ../mu
499 $ cd ../mu
500 $ hgph # d6bcb4f74035 and 145e75495359 changed because common is too smart
500 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
501 > # not ancestor of -r
501 o 8 draft a-F - b740e3e5c05d
502 o 8 draft a-F - b740e3e5c05d
502 |
503 |
503 o 7 draft a-E - e9f537e46dea
504 o 7 draft a-E - e9f537e46dea
504 |
505 |
505 | o 6 public n-B - 145e75495359
506 | o 6 draft n-B - 145e75495359
506 | |
507 | |
507 | o 5 public n-A - d6bcb4f74035
508 | o 5 draft n-A - d6bcb4f74035
508 | |
509 | |
509 o | 4 public a-D - b555f63b6063
510 o | 4 public a-D - b555f63b6063
510 | |
511 | |
511 o | 3 public a-C - 54acac6f23ab
512 o | 3 public a-C - 54acac6f23ab
512 | |
513 | |
513 | o 2 public b-A - f54f1bb90ff3
514 | o 2 draft b-A - f54f1bb90ff3
514 |/
515 |/
515 o 1 public a-B - 548a3d25dbf0
516 o 1 public a-B - 548a3d25dbf0
516 |
517 |
517 o 0 public a-A - 054250a37db4
518 o 0 public a-A - 054250a37db4
518
519
519
520
520 Pushing to Publish=True (unknown changeset)
521 Pushing to Publish=True (unknown changeset)
521
522
522 $ hg push ../beta -r b740e3e5c05d
523 $ hg push ../beta -r b740e3e5c05d
523 pushing to ../beta
524 pushing to ../beta
524 searching for changes
525 searching for changes
525 adding changesets
526 adding changesets
526 adding manifests
527 adding manifests
527 adding file changes
528 adding file changes
528 added 2 changesets with 2 changes to 2 files
529 added 2 changesets with 2 changes to 2 files
529 $ hgph # again d6bcb4f74035 and 145e75495359 changed because common is too smart
530 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
531 > # not ancestor of -r
530 o 8 public a-F - b740e3e5c05d
532 o 8 public a-F - b740e3e5c05d
531 |
533 |
532 o 7 public a-E - e9f537e46dea
534 o 7 public a-E - e9f537e46dea
533 |
535 |
534 | o 6 public n-B - 145e75495359
536 | o 6 draft n-B - 145e75495359
535 | |
537 | |
536 | o 5 public n-A - d6bcb4f74035
538 | o 5 draft n-A - d6bcb4f74035
537 | |
539 | |
538 o | 4 public a-D - b555f63b6063
540 o | 4 public a-D - b555f63b6063
539 | |
541 | |
540 o | 3 public a-C - 54acac6f23ab
542 o | 3 public a-C - 54acac6f23ab
541 | |
543 | |
542 | o 2 public b-A - f54f1bb90ff3
544 | o 2 draft b-A - f54f1bb90ff3
543 |/
545 |/
544 o 1 public a-B - 548a3d25dbf0
546 o 1 public a-B - 548a3d25dbf0
545 |
547 |
546 o 0 public a-A - 054250a37db4
548 o 0 public a-A - 054250a37db4
547
549
548
550
549 Pushing to Publish=True (common changeset)
551 Pushing to Publish=True (common changeset)
550
552
551 $ cd ../beta
553 $ cd ../beta
552 $ hg push ../alpha
554 $ hg push ../alpha
553 pushing to ../alpha
555 pushing to ../alpha
554 searching for changes
556 searching for changes
555 no changes found
557 no changes found
556 $ hgph
558 $ hgph
557 o 6 public a-F - b740e3e5c05d
559 o 6 public a-F - b740e3e5c05d
558 |
560 |
559 o 5 public a-E - e9f537e46dea
561 o 5 public a-E - e9f537e46dea
560 |
562 |
561 o 4 public a-D - b555f63b6063
563 o 4 public a-D - b555f63b6063
562 |
564 |
563 o 3 public a-C - 54acac6f23ab
565 o 3 public a-C - 54acac6f23ab
564 |
566 |
565 | @ 2 public b-A - f54f1bb90ff3
567 | @ 2 public b-A - f54f1bb90ff3
566 |/
568 |/
567 o 1 public a-B - 548a3d25dbf0
569 o 1 public a-B - 548a3d25dbf0
568 |
570 |
569 o 0 public a-A - 054250a37db4
571 o 0 public a-A - 054250a37db4
570
572
571 $ cd ../alpha
573 $ cd ../alpha
572 $ hgph # e9f537e46dea and b740e3e5c05d should have been sync to 0
574 $ hgph
573 @ 10 draft a-H - 967b449fbc94
575 @ 10 draft a-H - 967b449fbc94
574 |
576 |
575 | o 9 draft a-G - 3e27b6f1eee1
577 | o 9 draft a-G - 3e27b6f1eee1
576 | |
578 | |
577 | o 8 public a-F - b740e3e5c05d
579 | o 8 public a-F - b740e3e5c05d
578 | |
580 | |
579 | o 7 public a-E - e9f537e46dea
581 | o 7 public a-E - e9f537e46dea
580 | |
582 | |
581 +---o 6 public n-B - 145e75495359
583 +---o 6 public n-B - 145e75495359
582 | |
584 | |
583 o | 5 public n-A - d6bcb4f74035
585 o | 5 public n-A - d6bcb4f74035
584 | |
586 | |
585 o | 4 public b-A - f54f1bb90ff3
587 o | 4 public b-A - f54f1bb90ff3
586 | |
588 | |
587 | o 3 public a-D - b555f63b6063
589 | o 3 public a-D - b555f63b6063
588 | |
590 | |
589 | o 2 public a-C - 54acac6f23ab
591 | o 2 public a-C - 54acac6f23ab
590 |/
592 |/
591 o 1 public a-B - 548a3d25dbf0
593 o 1 public a-B - 548a3d25dbf0
592 |
594 |
593 o 0 public a-A - 054250a37db4
595 o 0 public a-A - 054250a37db4
594
596
595
597
596 Pushing to Publish=False (common changeset that change phase + unknown one)
598 Pushing to Publish=False (common changeset that change phase + unknown one)
597
599
598 $ hg push ../mu -r 967b449fbc94 -f
600 $ hg push ../mu -r 967b449fbc94 -f
599 pushing to ../mu
601 pushing to ../mu
600 searching for changes
602 searching for changes
601 adding changesets
603 adding changesets
602 adding manifests
604 adding manifests
603 adding file changes
605 adding file changes
604 added 1 changesets with 1 changes to 1 files (+1 heads)
606 added 1 changesets with 1 changes to 1 files (+1 heads)
605 $ hgph
607 $ hgph
606 @ 10 draft a-H - 967b449fbc94
608 @ 10 draft a-H - 967b449fbc94
607 |
609 |
608 | o 9 draft a-G - 3e27b6f1eee1
610 | o 9 draft a-G - 3e27b6f1eee1
609 | |
611 | |
610 | o 8 public a-F - b740e3e5c05d
612 | o 8 public a-F - b740e3e5c05d
611 | |
613 | |
612 | o 7 public a-E - e9f537e46dea
614 | o 7 public a-E - e9f537e46dea
613 | |
615 | |
614 +---o 6 public n-B - 145e75495359
616 +---o 6 public n-B - 145e75495359
615 | |
617 | |
616 o | 5 public n-A - d6bcb4f74035
618 o | 5 public n-A - d6bcb4f74035
617 | |
619 | |
618 o | 4 public b-A - f54f1bb90ff3
620 o | 4 public b-A - f54f1bb90ff3
619 | |
621 | |
620 | o 3 public a-D - b555f63b6063
622 | o 3 public a-D - b555f63b6063
621 | |
623 | |
622 | o 2 public a-C - 54acac6f23ab
624 | o 2 public a-C - 54acac6f23ab
623 |/
625 |/
624 o 1 public a-B - 548a3d25dbf0
626 o 1 public a-B - 548a3d25dbf0
625 |
627 |
626 o 0 public a-A - 054250a37db4
628 o 0 public a-A - 054250a37db4
627
629
628 $ cd ../mu
630 $ cd ../mu
629 $ hgph # d6bcb4f74035 should have changed phase
631 $ hgph # d6bcb4f74035 should have changed phase
630 > # again d6bcb4f74035 and 145e75495359 changed because common was too smart
632 > # 145e75495359 is still draft. not ancestor of -r
631 o 9 draft a-H - 967b449fbc94
633 o 9 draft a-H - 967b449fbc94
632 |
634 |
633 | o 8 public a-F - b740e3e5c05d
635 | o 8 public a-F - b740e3e5c05d
634 | |
636 | |
635 | o 7 public a-E - e9f537e46dea
637 | o 7 public a-E - e9f537e46dea
636 | |
638 | |
637 +---o 6 public n-B - 145e75495359
639 +---o 6 draft n-B - 145e75495359
638 | |
640 | |
639 o | 5 public n-A - d6bcb4f74035
641 o | 5 public n-A - d6bcb4f74035
640 | |
642 | |
641 | o 4 public a-D - b555f63b6063
643 | o 4 public a-D - b555f63b6063
642 | |
644 | |
643 | o 3 public a-C - 54acac6f23ab
645 | o 3 public a-C - 54acac6f23ab
644 | |
646 | |
645 o | 2 public b-A - f54f1bb90ff3
647 o | 2 public b-A - f54f1bb90ff3
646 |/
648 |/
647 o 1 public a-B - 548a3d25dbf0
649 o 1 public a-B - 548a3d25dbf0
648 |
650 |
649 o 0 public a-A - 054250a37db4
651 o 0 public a-A - 054250a37db4
650
652
651
653
652
654
653 Pushing to Publish=True (common changeset from publish=False)
655 Pushing to Publish=True (common changeset from publish=False)
654
656
655 (in mu)
657 (in mu)
656 $ hg push ../alpha
658 $ hg push ../alpha
657 pushing to ../alpha
659 pushing to ../alpha
658 searching for changes
660 searching for changes
659 no changes found
661 no changes found
660 $ hgph
662 $ hgph
661 o 9 public a-H - 967b449fbc94
663 o 9 public a-H - 967b449fbc94
662 |
664 |
663 | o 8 public a-F - b740e3e5c05d
665 | o 8 public a-F - b740e3e5c05d
664 | |
666 | |
665 | o 7 public a-E - e9f537e46dea
667 | o 7 public a-E - e9f537e46dea
666 | |
668 | |
667 +---o 6 public n-B - 145e75495359
669 +---o 6 public n-B - 145e75495359
668 | |
670 | |
669 o | 5 public n-A - d6bcb4f74035
671 o | 5 public n-A - d6bcb4f74035
670 | |
672 | |
671 | o 4 public a-D - b555f63b6063
673 | o 4 public a-D - b555f63b6063
672 | |
674 | |
673 | o 3 public a-C - 54acac6f23ab
675 | o 3 public a-C - 54acac6f23ab
674 | |
676 | |
675 o | 2 public b-A - f54f1bb90ff3
677 o | 2 public b-A - f54f1bb90ff3
676 |/
678 |/
677 o 1 public a-B - 548a3d25dbf0
679 o 1 public a-B - 548a3d25dbf0
678 |
680 |
679 o 0 public a-A - 054250a37db4
681 o 0 public a-A - 054250a37db4
680
682
681 $ hgph -R ../alpha # a-H should have been synced to 0
683 $ hgph -R ../alpha # a-H should have been synced to 0
682 @ 10 public a-H - 967b449fbc94
684 @ 10 public a-H - 967b449fbc94
683 |
685 |
684 | o 9 draft a-G - 3e27b6f1eee1
686 | o 9 draft a-G - 3e27b6f1eee1
685 | |
687 | |
686 | o 8 public a-F - b740e3e5c05d
688 | o 8 public a-F - b740e3e5c05d
687 | |
689 | |
688 | o 7 public a-E - e9f537e46dea
690 | o 7 public a-E - e9f537e46dea
689 | |
691 | |
690 +---o 6 public n-B - 145e75495359
692 +---o 6 public n-B - 145e75495359
691 | |
693 | |
692 o | 5 public n-A - d6bcb4f74035
694 o | 5 public n-A - d6bcb4f74035
693 | |
695 | |
694 o | 4 public b-A - f54f1bb90ff3
696 o | 4 public b-A - f54f1bb90ff3
695 | |
697 | |
696 | o 3 public a-D - b555f63b6063
698 | o 3 public a-D - b555f63b6063
697 | |
699 | |
698 | o 2 public a-C - 54acac6f23ab
700 | o 2 public a-C - 54acac6f23ab
699 |/
701 |/
700 o 1 public a-B - 548a3d25dbf0
702 o 1 public a-B - 548a3d25dbf0
701 |
703 |
702 o 0 public a-A - 054250a37db4
704 o 0 public a-A - 054250a37db4
703
705
704
706
705
707
706 Discovery locally secret changeset on a remote repository:
708 Discovery locally secret changeset on a remote repository:
707
709
708 - should make it non-secret
710 - should make it non-secret
709
711
710 $ cd ../alpha
712 $ cd ../alpha
711 $ mkcommit A-secret --config phases.new-commit=2
713 $ mkcommit A-secret --config phases.new-commit=2
712 $ hgph
714 $ hgph
713 @ 11 secret A-secret - 435b5d83910c
715 @ 11 secret A-secret - 435b5d83910c
714 |
716 |
715 o 10 public a-H - 967b449fbc94
717 o 10 public a-H - 967b449fbc94
716 |
718 |
717 | o 9 draft a-G - 3e27b6f1eee1
719 | o 9 draft a-G - 3e27b6f1eee1
718 | |
720 | |
719 | o 8 public a-F - b740e3e5c05d
721 | o 8 public a-F - b740e3e5c05d
720 | |
722 | |
721 | o 7 public a-E - e9f537e46dea
723 | o 7 public a-E - e9f537e46dea
722 | |
724 | |
723 +---o 6 public n-B - 145e75495359
725 +---o 6 public n-B - 145e75495359
724 | |
726 | |
725 o | 5 public n-A - d6bcb4f74035
727 o | 5 public n-A - d6bcb4f74035
726 | |
728 | |
727 o | 4 public b-A - f54f1bb90ff3
729 o | 4 public b-A - f54f1bb90ff3
728 | |
730 | |
729 | o 3 public a-D - b555f63b6063
731 | o 3 public a-D - b555f63b6063
730 | |
732 | |
731 | o 2 public a-C - 54acac6f23ab
733 | o 2 public a-C - 54acac6f23ab
732 |/
734 |/
733 o 1 public a-B - 548a3d25dbf0
735 o 1 public a-B - 548a3d25dbf0
734 |
736 |
735 o 0 public a-A - 054250a37db4
737 o 0 public a-A - 054250a37db4
736
738
737 $ hg bundle --base 'parents(.)' -r . ../secret-bundle.hg
739 $ hg bundle --base 'parents(.)' -r . ../secret-bundle.hg
738 1 changesets found
740 1 changesets found
739 $ hg -R ../mu unbundle ../secret-bundle.hg
741 $ hg -R ../mu unbundle ../secret-bundle.hg
740 adding changesets
742 adding changesets
741 adding manifests
743 adding manifests
742 adding file changes
744 adding file changes
743 added 1 changesets with 1 changes to 1 files
745 added 1 changesets with 1 changes to 1 files
744 (run 'hg update' to get a working copy)
746 (run 'hg update' to get a working copy)
745 $ hgph -R ../mu
747 $ hgph -R ../mu
746 o 10 draft A-secret - 435b5d83910c
748 o 10 draft A-secret - 435b5d83910c
747 |
749 |
748 o 9 public a-H - 967b449fbc94
750 o 9 public a-H - 967b449fbc94
749 |
751 |
750 | o 8 public a-F - b740e3e5c05d
752 | o 8 public a-F - b740e3e5c05d
751 | |
753 | |
752 | o 7 public a-E - e9f537e46dea
754 | o 7 public a-E - e9f537e46dea
753 | |
755 | |
754 +---o 6 public n-B - 145e75495359
756 +---o 6 public n-B - 145e75495359
755 | |
757 | |
756 o | 5 public n-A - d6bcb4f74035
758 o | 5 public n-A - d6bcb4f74035
757 | |
759 | |
758 | o 4 public a-D - b555f63b6063
760 | o 4 public a-D - b555f63b6063
759 | |
761 | |
760 | o 3 public a-C - 54acac6f23ab
762 | o 3 public a-C - 54acac6f23ab
761 | |
763 | |
762 o | 2 public b-A - f54f1bb90ff3
764 o | 2 public b-A - f54f1bb90ff3
763 |/
765 |/
764 o 1 public a-B - 548a3d25dbf0
766 o 1 public a-B - 548a3d25dbf0
765 |
767 |
766 o 0 public a-A - 054250a37db4
768 o 0 public a-A - 054250a37db4
767
769
768 $ hg pull ../mu
770 $ hg pull ../mu
769 pulling from ../mu
771 pulling from ../mu
770 searching for changes
772 searching for changes
771 no changes found
773 no changes found
772 $ hgph
774 $ hgph
773 @ 11 draft A-secret - 435b5d83910c
775 @ 11 draft A-secret - 435b5d83910c
774 |
776 |
775 o 10 public a-H - 967b449fbc94
777 o 10 public a-H - 967b449fbc94
776 |
778 |
777 | o 9 draft a-G - 3e27b6f1eee1
779 | o 9 draft a-G - 3e27b6f1eee1
778 | |
780 | |
779 | o 8 public a-F - b740e3e5c05d
781 | o 8 public a-F - b740e3e5c05d
780 | |
782 | |
781 | o 7 public a-E - e9f537e46dea
783 | o 7 public a-E - e9f537e46dea
782 | |
784 | |
783 +---o 6 public n-B - 145e75495359
785 +---o 6 public n-B - 145e75495359
784 | |
786 | |
785 o | 5 public n-A - d6bcb4f74035
787 o | 5 public n-A - d6bcb4f74035
786 | |
788 | |
787 o | 4 public b-A - f54f1bb90ff3
789 o | 4 public b-A - f54f1bb90ff3
788 | |
790 | |
789 | o 3 public a-D - b555f63b6063
791 | o 3 public a-D - b555f63b6063
790 | |
792 | |
791 | o 2 public a-C - 54acac6f23ab
793 | o 2 public a-C - 54acac6f23ab
792 |/
794 |/
793 o 1 public a-B - 548a3d25dbf0
795 o 1 public a-B - 548a3d25dbf0
794 |
796 |
795 o 0 public a-A - 054250a37db4
797 o 0 public a-A - 054250a37db4
796
798
797
799
798 pushing a locally public and draft changesets remotly secret should make them appear on the remote side
800 pushing a locally public and draft changesets remotly secret should make them appear on the remote side
799
801
800 $ hg -R ../mu phase --secret --force 967b449fbc94
802 $ hg -R ../mu phase --secret --force 967b449fbc94
801 $ hg push -r 435b5d83910c ../mu
803 $ hg push -r 435b5d83910c ../mu
802 pushing to ../mu
804 pushing to ../mu
803 searching for changes
805 searching for changes
804 adding changesets
806 adding changesets
805 adding manifests
807 adding manifests
806 adding file changes
808 adding file changes
807 added 0 changesets with 0 changes to 2 files
809 added 0 changesets with 0 changes to 2 files
808 $ hgph -R ../mu
810 $ hgph -R ../mu
809 o 10 draft A-secret - 435b5d83910c
811 o 10 draft A-secret - 435b5d83910c
810 |
812 |
811 o 9 public a-H - 967b449fbc94
813 o 9 public a-H - 967b449fbc94
812 |
814 |
813 | o 8 public a-F - b740e3e5c05d
815 | o 8 public a-F - b740e3e5c05d
814 | |
816 | |
815 | o 7 public a-E - e9f537e46dea
817 | o 7 public a-E - e9f537e46dea
816 | |
818 | |
817 +---o 6 public n-B - 145e75495359
819 +---o 6 public n-B - 145e75495359
818 | |
820 | |
819 o | 5 public n-A - d6bcb4f74035
821 o | 5 public n-A - d6bcb4f74035
820 | |
822 | |
821 | o 4 public a-D - b555f63b6063
823 | o 4 public a-D - b555f63b6063
822 | |
824 | |
823 | o 3 public a-C - 54acac6f23ab
825 | o 3 public a-C - 54acac6f23ab
824 | |
826 | |
825 o | 2 public b-A - f54f1bb90ff3
827 o | 2 public b-A - f54f1bb90ff3
826 |/
828 |/
827 o 1 public a-B - 548a3d25dbf0
829 o 1 public a-B - 548a3d25dbf0
828 |
830 |
829 o 0 public a-A - 054250a37db4
831 o 0 public a-A - 054250a37db4
830
832
831
833
832 pull new changeset with common draft locally
834 pull new changeset with common draft locally
833
835
834 $ hg up -q 967b449fbc94 # create a new root for draft
836 $ hg up -q 967b449fbc94 # create a new root for draft
835 $ mkcommit 'alpha-more'
837 $ mkcommit 'alpha-more'
836 created new head
838 created new head
837 $ hg push -fr . ../mu
839 $ hg push -fr . ../mu
838 pushing to ../mu
840 pushing to ../mu
839 searching for changes
841 searching for changes
840 adding changesets
842 adding changesets
841 adding manifests
843 adding manifests
842 adding file changes
844 adding file changes
843 added 1 changesets with 1 changes to 1 files (+1 heads)
845 added 1 changesets with 1 changes to 1 files (+1 heads)
844 $ cd ../mu
846 $ cd ../mu
845 $ hg phase --secret --force 1c5cfd894796
847 $ hg phase --secret --force 1c5cfd894796
846 $ hg up -q 435b5d83910c
848 $ hg up -q 435b5d83910c
847 $ mkcommit 'mu-more'
849 $ mkcommit 'mu-more'
848 $ cd ../alpha
850 $ cd ../alpha
849 $ hg pull ../mu
851 $ hg pull ../mu
850 pulling from ../mu
852 pulling from ../mu
851 searching for changes
853 searching for changes
852 adding changesets
854 adding changesets
853 adding manifests
855 adding manifests
854 adding file changes
856 adding file changes
855 added 1 changesets with 1 changes to 1 files
857 added 1 changesets with 1 changes to 1 files
856 (run 'hg update' to get a working copy)
858 (run 'hg update' to get a working copy)
857 $ hgph
859 $ hgph
858 o 13 draft mu-more - 5237fb433fc8
860 o 13 draft mu-more - 5237fb433fc8
859 |
861 |
860 | @ 12 draft alpha-more - 1c5cfd894796
862 | @ 12 draft alpha-more - 1c5cfd894796
861 | |
863 | |
862 o | 11 draft A-secret - 435b5d83910c
864 o | 11 draft A-secret - 435b5d83910c
863 |/
865 |/
864 o 10 public a-H - 967b449fbc94
866 o 10 public a-H - 967b449fbc94
865 |
867 |
866 | o 9 draft a-G - 3e27b6f1eee1
868 | o 9 draft a-G - 3e27b6f1eee1
867 | |
869 | |
868 | o 8 public a-F - b740e3e5c05d
870 | o 8 public a-F - b740e3e5c05d
869 | |
871 | |
870 | o 7 public a-E - e9f537e46dea
872 | o 7 public a-E - e9f537e46dea
871 | |
873 | |
872 +---o 6 public n-B - 145e75495359
874 +---o 6 public n-B - 145e75495359
873 | |
875 | |
874 o | 5 public n-A - d6bcb4f74035
876 o | 5 public n-A - d6bcb4f74035
875 | |
877 | |
876 o | 4 public b-A - f54f1bb90ff3
878 o | 4 public b-A - f54f1bb90ff3
877 | |
879 | |
878 | o 3 public a-D - b555f63b6063
880 | o 3 public a-D - b555f63b6063
879 | |
881 | |
880 | o 2 public a-C - 54acac6f23ab
882 | o 2 public a-C - 54acac6f23ab
881 |/
883 |/
882 o 1 public a-B - 548a3d25dbf0
884 o 1 public a-B - 548a3d25dbf0
883 |
885 |
884 o 0 public a-A - 054250a37db4
886 o 0 public a-A - 054250a37db4
885
887
886
888
887 Test that test are properly ignored on remote event when existing locally
889 Test that test are properly ignored on remote event when existing locally
888
890
889 $ cd ..
891 $ cd ..
890 $ hg clone -qU -r b555f63b6063 -r f54f1bb90ff3 beta gamma
892 $ hg clone -qU -r b555f63b6063 -r f54f1bb90ff3 beta gamma
891
893
892 # pathological case are
894 # pathological case are
893 #
895 #
894 # * secret remotely
896 # * secret remotely
895 # * known locally
897 # * known locally
896 # * repo have uncommon changeset
898 # * repo have uncommon changeset
897
899
898 $ hg -R beta phase --secret --force f54f1bb90ff3
900 $ hg -R beta phase --secret --force f54f1bb90ff3
899 $ hg -R gamma phase --draft --force f54f1bb90ff3
901 $ hg -R gamma phase --draft --force f54f1bb90ff3
900
902
901 $ cd gamma
903 $ cd gamma
902 $ hg pull ../beta
904 $ hg pull ../beta
903 pulling from ../beta
905 pulling from ../beta
904 searching for changes
906 searching for changes
905 adding changesets
907 adding changesets
906 adding manifests
908 adding manifests
907 adding file changes
909 adding file changes
908 added 2 changesets with 2 changes to 2 files
910 added 2 changesets with 2 changes to 2 files
909 (run 'hg update' to get a working copy)
911 (run 'hg update' to get a working copy)
910 $ hg phase f54f1bb90ff3
912 $ hg phase f54f1bb90ff3
911 2: draft
913 2: draft
912
914
913 same over the wire
915 same over the wire
914
916
915 $ cd ../beta
917 $ cd ../beta
916 $ hg serve -p $HGPORT -d --pid-file=../beta.pid -E ../beta-error.log
918 $ hg serve -p $HGPORT -d --pid-file=../beta.pid -E ../beta-error.log
917 $ cat ../beta.pid >> $DAEMON_PIDS
919 $ cat ../beta.pid >> $DAEMON_PIDS
918 $ cd ../gamma
920 $ cd ../gamma
919
921
920 $ hg pull http://localhost:$HGPORT/
922 $ hg pull http://localhost:$HGPORT/
921 pulling from http://localhost:$HGPORT/
923 pulling from http://localhost:$HGPORT/
922 searching for changes
924 searching for changes
923 no changes found
925 no changes found
924 $ hg phase f54f1bb90ff3
926 $ hg phase f54f1bb90ff3
925 2: draft
927 2: draft
926
928
927 check that secret local on both side are not synced to public
929 check that secret local on both side are not synced to public
928
930
929 $ hg push -r b555f63b6063 http://localhost:$HGPORT/
931 $ hg push -r b555f63b6063 http://localhost:$HGPORT/
930 pushing to http://localhost:$HGPORT/
932 pushing to http://localhost:$HGPORT/
931 searching for changes
933 searching for changes
932 no changes found
934 no changes found
933 $ hg phase f54f1bb90ff3
935 $ hg phase f54f1bb90ff3
934 2: draft
936 2: draft
935
937
936 put the changeset in the draft state again
938 put the changeset in the draft state again
937 (first test after this one expect to be able to copy)
939 (first test after this one expect to be able to copy)
938
940
939 $ cd ..
941 $ cd ..
940
942
941
943
942 Test Clone behavior
944 Test Clone behavior
943
945
944 A. Clone without secret changeset
946 A. Clone without secret changeset
945
947
946 1. cloning non-publishing repository
948 1. cloning non-publishing repository
947 (Phase should be preserved)
949 (Phase should be preserved)
948
950
949 # make sure there is no secret so we can use a copy clone
951 # make sure there is no secret so we can use a copy clone
950
952
951 $ hg -R mu phase --draft 'secret()'
953 $ hg -R mu phase --draft 'secret()'
952
954
953 $ hg clone -U mu Tau
955 $ hg clone -U mu Tau
954 $ hgph -R Tau
956 $ hgph -R Tau
955 o 12 draft mu-more - 5237fb433fc8
957 o 12 draft mu-more - 5237fb433fc8
956 |
958 |
957 | o 11 draft alpha-more - 1c5cfd894796
959 | o 11 draft alpha-more - 1c5cfd894796
958 | |
960 | |
959 o | 10 draft A-secret - 435b5d83910c
961 o | 10 draft A-secret - 435b5d83910c
960 |/
962 |/
961 o 9 public a-H - 967b449fbc94
963 o 9 public a-H - 967b449fbc94
962 |
964 |
963 | o 8 public a-F - b740e3e5c05d
965 | o 8 public a-F - b740e3e5c05d
964 | |
966 | |
965 | o 7 public a-E - e9f537e46dea
967 | o 7 public a-E - e9f537e46dea
966 | |
968 | |
967 +---o 6 public n-B - 145e75495359
969 +---o 6 public n-B - 145e75495359
968 | |
970 | |
969 o | 5 public n-A - d6bcb4f74035
971 o | 5 public n-A - d6bcb4f74035
970 | |
972 | |
971 | o 4 public a-D - b555f63b6063
973 | o 4 public a-D - b555f63b6063
972 | |
974 | |
973 | o 3 public a-C - 54acac6f23ab
975 | o 3 public a-C - 54acac6f23ab
974 | |
976 | |
975 o | 2 public b-A - f54f1bb90ff3
977 o | 2 public b-A - f54f1bb90ff3
976 |/
978 |/
977 o 1 public a-B - 548a3d25dbf0
979 o 1 public a-B - 548a3d25dbf0
978 |
980 |
979 o 0 public a-A - 054250a37db4
981 o 0 public a-A - 054250a37db4
980
982
981
983
982 2. cloning publishing repository
984 2. cloning publishing repository
983
985
984 (everything should be public)
986 (everything should be public)
985
987
986 $ hg clone -U alpha Upsilon
988 $ hg clone -U alpha Upsilon
987 $ hgph -R Upsilon
989 $ hgph -R Upsilon
988 o 13 public mu-more - 5237fb433fc8
990 o 13 public mu-more - 5237fb433fc8
989 |
991 |
990 | o 12 public alpha-more - 1c5cfd894796
992 | o 12 public alpha-more - 1c5cfd894796
991 | |
993 | |
992 o | 11 public A-secret - 435b5d83910c
994 o | 11 public A-secret - 435b5d83910c
993 |/
995 |/
994 o 10 public a-H - 967b449fbc94
996 o 10 public a-H - 967b449fbc94
995 |
997 |
996 | o 9 public a-G - 3e27b6f1eee1
998 | o 9 public a-G - 3e27b6f1eee1
997 | |
999 | |
998 | o 8 public a-F - b740e3e5c05d
1000 | o 8 public a-F - b740e3e5c05d
999 | |
1001 | |
1000 | o 7 public a-E - e9f537e46dea
1002 | o 7 public a-E - e9f537e46dea
1001 | |
1003 | |
1002 +---o 6 public n-B - 145e75495359
1004 +---o 6 public n-B - 145e75495359
1003 | |
1005 | |
1004 o | 5 public n-A - d6bcb4f74035
1006 o | 5 public n-A - d6bcb4f74035
1005 | |
1007 | |
1006 o | 4 public b-A - f54f1bb90ff3
1008 o | 4 public b-A - f54f1bb90ff3
1007 | |
1009 | |
1008 | o 3 public a-D - b555f63b6063
1010 | o 3 public a-D - b555f63b6063
1009 | |
1011 | |
1010 | o 2 public a-C - 54acac6f23ab
1012 | o 2 public a-C - 54acac6f23ab
1011 |/
1013 |/
1012 o 1 public a-B - 548a3d25dbf0
1014 o 1 public a-B - 548a3d25dbf0
1013 |
1015 |
1014 o 0 public a-A - 054250a37db4
1016 o 0 public a-A - 054250a37db4
1015
1017
1016
1018
General Comments 0
You need to be logged in to leave comments. Login now