##// END OF EJS Templates
discovery: properly filter changeset in 'peer.known' (issue4982)...
Pierre-Yves David -
r27319:b64b6fdc default
parent child Browse files
Show More
@@ -1,1921 +1,1922 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import hex, nullid, wdirrev, short
7 from node import hex, nullid, wdirrev, short
8 from i18n import _
8 from i18n import _
9 import urllib
9 import urllib
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 import lock as lockmod
12 import lock as lockmod
13 import transaction, store, encoding, exchange, bundle2
13 import transaction, store, encoding, exchange, bundle2
14 import scmutil, util, extensions, hook, error, revset, cmdutil
14 import scmutil, util, extensions, hook, error, revset, cmdutil
15 import match as matchmod
15 import match as matchmod
16 import merge as mergemod
16 import merge as mergemod
17 import tags as tagsmod
17 import tags as tagsmod
18 from lock import release
18 from lock import release
19 import weakref, errno, os, time, inspect, random
19 import weakref, errno, os, time, inspect, random
20 import branchmap, pathutil
20 import branchmap, pathutil
21 import namespaces
21 import namespaces
22 propertycache = util.propertycache
22 propertycache = util.propertycache
23 filecache = scmutil.filecache
23 filecache = scmutil.filecache
24
24
25 class repofilecache(filecache):
25 class repofilecache(filecache):
26 """All filecache usage on repo are done for logic that should be unfiltered
26 """All filecache usage on repo are done for logic that should be unfiltered
27 """
27 """
28
28
29 def __get__(self, repo, type=None):
29 def __get__(self, repo, type=None):
30 return super(repofilecache, self).__get__(repo.unfiltered(), type)
30 return super(repofilecache, self).__get__(repo.unfiltered(), type)
31 def __set__(self, repo, value):
31 def __set__(self, repo, value):
32 return super(repofilecache, self).__set__(repo.unfiltered(), value)
32 return super(repofilecache, self).__set__(repo.unfiltered(), value)
33 def __delete__(self, repo):
33 def __delete__(self, repo):
34 return super(repofilecache, self).__delete__(repo.unfiltered())
34 return super(repofilecache, self).__delete__(repo.unfiltered())
35
35
36 class storecache(repofilecache):
36 class storecache(repofilecache):
37 """filecache for files in the store"""
37 """filecache for files in the store"""
38 def join(self, obj, fname):
38 def join(self, obj, fname):
39 return obj.sjoin(fname)
39 return obj.sjoin(fname)
40
40
41 class unfilteredpropertycache(propertycache):
41 class unfilteredpropertycache(propertycache):
42 """propertycache that apply to unfiltered repo only"""
42 """propertycache that apply to unfiltered repo only"""
43
43
44 def __get__(self, repo, type=None):
44 def __get__(self, repo, type=None):
45 unfi = repo.unfiltered()
45 unfi = repo.unfiltered()
46 if unfi is repo:
46 if unfi is repo:
47 return super(unfilteredpropertycache, self).__get__(unfi)
47 return super(unfilteredpropertycache, self).__get__(unfi)
48 return getattr(unfi, self.name)
48 return getattr(unfi, self.name)
49
49
50 class filteredpropertycache(propertycache):
50 class filteredpropertycache(propertycache):
51 """propertycache that must take filtering in account"""
51 """propertycache that must take filtering in account"""
52
52
53 def cachevalue(self, obj, value):
53 def cachevalue(self, obj, value):
54 object.__setattr__(obj, self.name, value)
54 object.__setattr__(obj, self.name, value)
55
55
56
56
57 def hasunfilteredcache(repo, name):
57 def hasunfilteredcache(repo, name):
58 """check if a repo has an unfilteredpropertycache value for <name>"""
58 """check if a repo has an unfilteredpropertycache value for <name>"""
59 return name in vars(repo.unfiltered())
59 return name in vars(repo.unfiltered())
60
60
61 def unfilteredmethod(orig):
61 def unfilteredmethod(orig):
62 """decorate method that always need to be run on unfiltered version"""
62 """decorate method that always need to be run on unfiltered version"""
63 def wrapper(repo, *args, **kwargs):
63 def wrapper(repo, *args, **kwargs):
64 return orig(repo.unfiltered(), *args, **kwargs)
64 return orig(repo.unfiltered(), *args, **kwargs)
65 return wrapper
65 return wrapper
66
66
67 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
67 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
68 'unbundle'))
68 'unbundle'))
69 legacycaps = moderncaps.union(set(['changegroupsubset']))
69 legacycaps = moderncaps.union(set(['changegroupsubset']))
70
70
71 class localpeer(peer.peerrepository):
71 class localpeer(peer.peerrepository):
72 '''peer for a local repo; reflects only the most recent API'''
72 '''peer for a local repo; reflects only the most recent API'''
73
73
74 def __init__(self, repo, caps=moderncaps):
74 def __init__(self, repo, caps=moderncaps):
75 peer.peerrepository.__init__(self)
75 peer.peerrepository.__init__(self)
76 self._repo = repo.filtered('served')
76 self._repo = repo.filtered('served')
77 self.ui = repo.ui
77 self.ui = repo.ui
78 self._caps = repo._restrictcapabilities(caps)
78 self._caps = repo._restrictcapabilities(caps)
79 self.requirements = repo.requirements
79 self.requirements = repo.requirements
80 self.supportedformats = repo.supportedformats
80 self.supportedformats = repo.supportedformats
81
81
82 def close(self):
82 def close(self):
83 self._repo.close()
83 self._repo.close()
84
84
85 def _capabilities(self):
85 def _capabilities(self):
86 return self._caps
86 return self._caps
87
87
88 def local(self):
88 def local(self):
89 return self._repo
89 return self._repo
90
90
91 def canpush(self):
91 def canpush(self):
92 return True
92 return True
93
93
94 def url(self):
94 def url(self):
95 return self._repo.url()
95 return self._repo.url()
96
96
97 def lookup(self, key):
97 def lookup(self, key):
98 return self._repo.lookup(key)
98 return self._repo.lookup(key)
99
99
100 def branchmap(self):
100 def branchmap(self):
101 return self._repo.branchmap()
101 return self._repo.branchmap()
102
102
103 def heads(self):
103 def heads(self):
104 return self._repo.heads()
104 return self._repo.heads()
105
105
106 def known(self, nodes):
106 def known(self, nodes):
107 return self._repo.known(nodes)
107 return self._repo.known(nodes)
108
108
109 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
109 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
110 **kwargs):
110 **kwargs):
111 cg = exchange.getbundle(self._repo, source, heads=heads,
111 cg = exchange.getbundle(self._repo, source, heads=heads,
112 common=common, bundlecaps=bundlecaps, **kwargs)
112 common=common, bundlecaps=bundlecaps, **kwargs)
113 if bundlecaps is not None and 'HG20' in bundlecaps:
113 if bundlecaps is not None and 'HG20' in bundlecaps:
114 # When requesting a bundle2, getbundle returns a stream to make the
114 # When requesting a bundle2, getbundle returns a stream to make the
115 # wire level function happier. We need to build a proper object
115 # wire level function happier. We need to build a proper object
116 # from it in local peer.
116 # from it in local peer.
117 cg = bundle2.getunbundler(self.ui, cg)
117 cg = bundle2.getunbundler(self.ui, cg)
118 return cg
118 return cg
119
119
120 # TODO We might want to move the next two calls into legacypeer and add
120 # TODO We might want to move the next two calls into legacypeer and add
121 # unbundle instead.
121 # unbundle instead.
122
122
123 def unbundle(self, cg, heads, url):
123 def unbundle(self, cg, heads, url):
124 """apply a bundle on a repo
124 """apply a bundle on a repo
125
125
126 This function handles the repo locking itself."""
126 This function handles the repo locking itself."""
127 try:
127 try:
128 try:
128 try:
129 cg = exchange.readbundle(self.ui, cg, None)
129 cg = exchange.readbundle(self.ui, cg, None)
130 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
130 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
131 if util.safehasattr(ret, 'getchunks'):
131 if util.safehasattr(ret, 'getchunks'):
132 # This is a bundle20 object, turn it into an unbundler.
132 # This is a bundle20 object, turn it into an unbundler.
133 # This little dance should be dropped eventually when the
133 # This little dance should be dropped eventually when the
134 # API is finally improved.
134 # API is finally improved.
135 stream = util.chunkbuffer(ret.getchunks())
135 stream = util.chunkbuffer(ret.getchunks())
136 ret = bundle2.getunbundler(self.ui, stream)
136 ret = bundle2.getunbundler(self.ui, stream)
137 return ret
137 return ret
138 except Exception as exc:
138 except Exception as exc:
139 # If the exception contains output salvaged from a bundle2
139 # If the exception contains output salvaged from a bundle2
140 # reply, we need to make sure it is printed before continuing
140 # reply, we need to make sure it is printed before continuing
141 # to fail. So we build a bundle2 with such output and consume
141 # to fail. So we build a bundle2 with such output and consume
142 # it directly.
142 # it directly.
143 #
143 #
144 # This is not very elegant but allows a "simple" solution for
144 # This is not very elegant but allows a "simple" solution for
145 # issue4594
145 # issue4594
146 output = getattr(exc, '_bundle2salvagedoutput', ())
146 output = getattr(exc, '_bundle2salvagedoutput', ())
147 if output:
147 if output:
148 bundler = bundle2.bundle20(self._repo.ui)
148 bundler = bundle2.bundle20(self._repo.ui)
149 for out in output:
149 for out in output:
150 bundler.addpart(out)
150 bundler.addpart(out)
151 stream = util.chunkbuffer(bundler.getchunks())
151 stream = util.chunkbuffer(bundler.getchunks())
152 b = bundle2.getunbundler(self.ui, stream)
152 b = bundle2.getunbundler(self.ui, stream)
153 bundle2.processbundle(self._repo, b)
153 bundle2.processbundle(self._repo, b)
154 raise
154 raise
155 except error.PushRaced as exc:
155 except error.PushRaced as exc:
156 raise error.ResponseError(_('push failed:'), str(exc))
156 raise error.ResponseError(_('push failed:'), str(exc))
157
157
158 def lock(self):
158 def lock(self):
159 return self._repo.lock()
159 return self._repo.lock()
160
160
161 def addchangegroup(self, cg, source, url):
161 def addchangegroup(self, cg, source, url):
162 return cg.apply(self._repo, source, url)
162 return cg.apply(self._repo, source, url)
163
163
164 def pushkey(self, namespace, key, old, new):
164 def pushkey(self, namespace, key, old, new):
165 return self._repo.pushkey(namespace, key, old, new)
165 return self._repo.pushkey(namespace, key, old, new)
166
166
167 def listkeys(self, namespace):
167 def listkeys(self, namespace):
168 return self._repo.listkeys(namespace)
168 return self._repo.listkeys(namespace)
169
169
170 def debugwireargs(self, one, two, three=None, four=None, five=None):
170 def debugwireargs(self, one, two, three=None, four=None, five=None):
171 '''used to test argument passing over the wire'''
171 '''used to test argument passing over the wire'''
172 return "%s %s %s %s %s" % (one, two, three, four, five)
172 return "%s %s %s %s %s" % (one, two, three, four, five)
173
173
174 class locallegacypeer(localpeer):
174 class locallegacypeer(localpeer):
175 '''peer extension which implements legacy methods too; used for tests with
175 '''peer extension which implements legacy methods too; used for tests with
176 restricted capabilities'''
176 restricted capabilities'''
177
177
178 def __init__(self, repo):
178 def __init__(self, repo):
179 localpeer.__init__(self, repo, caps=legacycaps)
179 localpeer.__init__(self, repo, caps=legacycaps)
180
180
181 def branches(self, nodes):
181 def branches(self, nodes):
182 return self._repo.branches(nodes)
182 return self._repo.branches(nodes)
183
183
184 def between(self, pairs):
184 def between(self, pairs):
185 return self._repo.between(pairs)
185 return self._repo.between(pairs)
186
186
187 def changegroup(self, basenodes, source):
187 def changegroup(self, basenodes, source):
188 return changegroup.changegroup(self._repo, basenodes, source)
188 return changegroup.changegroup(self._repo, basenodes, source)
189
189
190 def changegroupsubset(self, bases, heads, source):
190 def changegroupsubset(self, bases, heads, source):
191 return changegroup.changegroupsubset(self._repo, bases, heads, source)
191 return changegroup.changegroupsubset(self._repo, bases, heads, source)
192
192
193 class localrepository(object):
193 class localrepository(object):
194
194
195 supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
195 supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
196 'manifestv2'))
196 'manifestv2'))
197 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
197 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
198 'dotencode'))
198 'dotencode'))
199 openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
199 openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
200 filtername = None
200 filtername = None
201
201
202 # a list of (ui, featureset) functions.
202 # a list of (ui, featureset) functions.
203 # only functions defined in module of enabled extensions are invoked
203 # only functions defined in module of enabled extensions are invoked
204 featuresetupfuncs = set()
204 featuresetupfuncs = set()
205
205
206 def _baserequirements(self, create):
206 def _baserequirements(self, create):
207 return ['revlogv1']
207 return ['revlogv1']
208
208
209 def __init__(self, baseui, path=None, create=False):
209 def __init__(self, baseui, path=None, create=False):
210 self.requirements = set()
210 self.requirements = set()
211 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
211 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
212 self.wopener = self.wvfs
212 self.wopener = self.wvfs
213 self.root = self.wvfs.base
213 self.root = self.wvfs.base
214 self.path = self.wvfs.join(".hg")
214 self.path = self.wvfs.join(".hg")
215 self.origroot = path
215 self.origroot = path
216 self.auditor = pathutil.pathauditor(self.root, self._checknested)
216 self.auditor = pathutil.pathauditor(self.root, self._checknested)
217 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
217 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
218 realfs=False)
218 realfs=False)
219 self.vfs = scmutil.vfs(self.path)
219 self.vfs = scmutil.vfs(self.path)
220 self.opener = self.vfs
220 self.opener = self.vfs
221 self.baseui = baseui
221 self.baseui = baseui
222 self.ui = baseui.copy()
222 self.ui = baseui.copy()
223 self.ui.copy = baseui.copy # prevent copying repo configuration
223 self.ui.copy = baseui.copy # prevent copying repo configuration
224 # A list of callback to shape the phase if no data were found.
224 # A list of callback to shape the phase if no data were found.
225 # Callback are in the form: func(repo, roots) --> processed root.
225 # Callback are in the form: func(repo, roots) --> processed root.
226 # This list it to be filled by extension during repo setup
226 # This list it to be filled by extension during repo setup
227 self._phasedefaults = []
227 self._phasedefaults = []
228 try:
228 try:
229 self.ui.readconfig(self.join("hgrc"), self.root)
229 self.ui.readconfig(self.join("hgrc"), self.root)
230 extensions.loadall(self.ui)
230 extensions.loadall(self.ui)
231 except IOError:
231 except IOError:
232 pass
232 pass
233
233
234 if self.featuresetupfuncs:
234 if self.featuresetupfuncs:
235 self.supported = set(self._basesupported) # use private copy
235 self.supported = set(self._basesupported) # use private copy
236 extmods = set(m.__name__ for n, m
236 extmods = set(m.__name__ for n, m
237 in extensions.extensions(self.ui))
237 in extensions.extensions(self.ui))
238 for setupfunc in self.featuresetupfuncs:
238 for setupfunc in self.featuresetupfuncs:
239 if setupfunc.__module__ in extmods:
239 if setupfunc.__module__ in extmods:
240 setupfunc(self.ui, self.supported)
240 setupfunc(self.ui, self.supported)
241 else:
241 else:
242 self.supported = self._basesupported
242 self.supported = self._basesupported
243
243
244 if not self.vfs.isdir():
244 if not self.vfs.isdir():
245 if create:
245 if create:
246 if not self.wvfs.exists():
246 if not self.wvfs.exists():
247 self.wvfs.makedirs()
247 self.wvfs.makedirs()
248 self.vfs.makedir(notindexed=True)
248 self.vfs.makedir(notindexed=True)
249 self.requirements.update(self._baserequirements(create))
249 self.requirements.update(self._baserequirements(create))
250 if self.ui.configbool('format', 'usestore', True):
250 if self.ui.configbool('format', 'usestore', True):
251 self.vfs.mkdir("store")
251 self.vfs.mkdir("store")
252 self.requirements.add("store")
252 self.requirements.add("store")
253 if self.ui.configbool('format', 'usefncache', True):
253 if self.ui.configbool('format', 'usefncache', True):
254 self.requirements.add("fncache")
254 self.requirements.add("fncache")
255 if self.ui.configbool('format', 'dotencode', True):
255 if self.ui.configbool('format', 'dotencode', True):
256 self.requirements.add('dotencode')
256 self.requirements.add('dotencode')
257 # create an invalid changelog
257 # create an invalid changelog
258 self.vfs.append(
258 self.vfs.append(
259 "00changelog.i",
259 "00changelog.i",
260 '\0\0\0\2' # represents revlogv2
260 '\0\0\0\2' # represents revlogv2
261 ' dummy changelog to prevent using the old repo layout'
261 ' dummy changelog to prevent using the old repo layout'
262 )
262 )
263 if scmutil.gdinitconfig(self.ui):
263 if scmutil.gdinitconfig(self.ui):
264 self.requirements.add("generaldelta")
264 self.requirements.add("generaldelta")
265 if self.ui.configbool('experimental', 'treemanifest', False):
265 if self.ui.configbool('experimental', 'treemanifest', False):
266 self.requirements.add("treemanifest")
266 self.requirements.add("treemanifest")
267 if self.ui.configbool('experimental', 'manifestv2', False):
267 if self.ui.configbool('experimental', 'manifestv2', False):
268 self.requirements.add("manifestv2")
268 self.requirements.add("manifestv2")
269 else:
269 else:
270 raise error.RepoError(_("repository %s not found") % path)
270 raise error.RepoError(_("repository %s not found") % path)
271 elif create:
271 elif create:
272 raise error.RepoError(_("repository %s already exists") % path)
272 raise error.RepoError(_("repository %s already exists") % path)
273 else:
273 else:
274 try:
274 try:
275 self.requirements = scmutil.readrequires(
275 self.requirements = scmutil.readrequires(
276 self.vfs, self.supported)
276 self.vfs, self.supported)
277 except IOError as inst:
277 except IOError as inst:
278 if inst.errno != errno.ENOENT:
278 if inst.errno != errno.ENOENT:
279 raise
279 raise
280
280
281 self.sharedpath = self.path
281 self.sharedpath = self.path
282 try:
282 try:
283 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
283 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
284 realpath=True)
284 realpath=True)
285 s = vfs.base
285 s = vfs.base
286 if not vfs.exists():
286 if not vfs.exists():
287 raise error.RepoError(
287 raise error.RepoError(
288 _('.hg/sharedpath points to nonexistent directory %s') % s)
288 _('.hg/sharedpath points to nonexistent directory %s') % s)
289 self.sharedpath = s
289 self.sharedpath = s
290 except IOError as inst:
290 except IOError as inst:
291 if inst.errno != errno.ENOENT:
291 if inst.errno != errno.ENOENT:
292 raise
292 raise
293
293
294 self.store = store.store(
294 self.store = store.store(
295 self.requirements, self.sharedpath, scmutil.vfs)
295 self.requirements, self.sharedpath, scmutil.vfs)
296 self.spath = self.store.path
296 self.spath = self.store.path
297 self.svfs = self.store.vfs
297 self.svfs = self.store.vfs
298 self.sjoin = self.store.join
298 self.sjoin = self.store.join
299 self.vfs.createmode = self.store.createmode
299 self.vfs.createmode = self.store.createmode
300 self._applyopenerreqs()
300 self._applyopenerreqs()
301 if create:
301 if create:
302 self._writerequirements()
302 self._writerequirements()
303
303
304 self._dirstatevalidatewarned = False
304 self._dirstatevalidatewarned = False
305
305
306 self._branchcaches = {}
306 self._branchcaches = {}
307 self._revbranchcache = None
307 self._revbranchcache = None
308 self.filterpats = {}
308 self.filterpats = {}
309 self._datafilters = {}
309 self._datafilters = {}
310 self._transref = self._lockref = self._wlockref = None
310 self._transref = self._lockref = self._wlockref = None
311
311
312 # A cache for various files under .hg/ that tracks file changes,
312 # A cache for various files under .hg/ that tracks file changes,
313 # (used by the filecache decorator)
313 # (used by the filecache decorator)
314 #
314 #
315 # Maps a property name to its util.filecacheentry
315 # Maps a property name to its util.filecacheentry
316 self._filecache = {}
316 self._filecache = {}
317
317
318 # hold sets of revision to be filtered
318 # hold sets of revision to be filtered
319 # should be cleared when something might have changed the filter value:
319 # should be cleared when something might have changed the filter value:
320 # - new changesets,
320 # - new changesets,
321 # - phase change,
321 # - phase change,
322 # - new obsolescence marker,
322 # - new obsolescence marker,
323 # - working directory parent change,
323 # - working directory parent change,
324 # - bookmark changes
324 # - bookmark changes
325 self.filteredrevcache = {}
325 self.filteredrevcache = {}
326
326
327 # generic mapping between names and nodes
327 # generic mapping between names and nodes
328 self.names = namespaces.namespaces()
328 self.names = namespaces.namespaces()
329
329
330 def close(self):
330 def close(self):
331 self._writecaches()
331 self._writecaches()
332
332
333 def _writecaches(self):
333 def _writecaches(self):
334 if self._revbranchcache:
334 if self._revbranchcache:
335 self._revbranchcache.write()
335 self._revbranchcache.write()
336
336
337 def _restrictcapabilities(self, caps):
337 def _restrictcapabilities(self, caps):
338 if self.ui.configbool('experimental', 'bundle2-advertise', True):
338 if self.ui.configbool('experimental', 'bundle2-advertise', True):
339 caps = set(caps)
339 caps = set(caps)
340 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
340 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
341 caps.add('bundle2=' + urllib.quote(capsblob))
341 caps.add('bundle2=' + urllib.quote(capsblob))
342 return caps
342 return caps
343
343
344 def _applyopenerreqs(self):
344 def _applyopenerreqs(self):
345 self.svfs.options = dict((r, 1) for r in self.requirements
345 self.svfs.options = dict((r, 1) for r in self.requirements
346 if r in self.openerreqs)
346 if r in self.openerreqs)
347 # experimental config: format.chunkcachesize
347 # experimental config: format.chunkcachesize
348 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
348 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
349 if chunkcachesize is not None:
349 if chunkcachesize is not None:
350 self.svfs.options['chunkcachesize'] = chunkcachesize
350 self.svfs.options['chunkcachesize'] = chunkcachesize
351 # experimental config: format.maxchainlen
351 # experimental config: format.maxchainlen
352 maxchainlen = self.ui.configint('format', 'maxchainlen')
352 maxchainlen = self.ui.configint('format', 'maxchainlen')
353 if maxchainlen is not None:
353 if maxchainlen is not None:
354 self.svfs.options['maxchainlen'] = maxchainlen
354 self.svfs.options['maxchainlen'] = maxchainlen
355 # experimental config: format.manifestcachesize
355 # experimental config: format.manifestcachesize
356 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
356 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
357 if manifestcachesize is not None:
357 if manifestcachesize is not None:
358 self.svfs.options['manifestcachesize'] = manifestcachesize
358 self.svfs.options['manifestcachesize'] = manifestcachesize
359 # experimental config: format.aggressivemergedeltas
359 # experimental config: format.aggressivemergedeltas
360 aggressivemergedeltas = self.ui.configbool('format',
360 aggressivemergedeltas = self.ui.configbool('format',
361 'aggressivemergedeltas', False)
361 'aggressivemergedeltas', False)
362 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
362 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
363 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
363 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
364
364
365 def _writerequirements(self):
365 def _writerequirements(self):
366 scmutil.writerequires(self.vfs, self.requirements)
366 scmutil.writerequires(self.vfs, self.requirements)
367
367
368 def _checknested(self, path):
368 def _checknested(self, path):
369 """Determine if path is a legal nested repository."""
369 """Determine if path is a legal nested repository."""
370 if not path.startswith(self.root):
370 if not path.startswith(self.root):
371 return False
371 return False
372 subpath = path[len(self.root) + 1:]
372 subpath = path[len(self.root) + 1:]
373 normsubpath = util.pconvert(subpath)
373 normsubpath = util.pconvert(subpath)
374
374
375 # XXX: Checking against the current working copy is wrong in
375 # XXX: Checking against the current working copy is wrong in
376 # the sense that it can reject things like
376 # the sense that it can reject things like
377 #
377 #
378 # $ hg cat -r 10 sub/x.txt
378 # $ hg cat -r 10 sub/x.txt
379 #
379 #
380 # if sub/ is no longer a subrepository in the working copy
380 # if sub/ is no longer a subrepository in the working copy
381 # parent revision.
381 # parent revision.
382 #
382 #
383 # However, it can of course also allow things that would have
383 # However, it can of course also allow things that would have
384 # been rejected before, such as the above cat command if sub/
384 # been rejected before, such as the above cat command if sub/
385 # is a subrepository now, but was a normal directory before.
385 # is a subrepository now, but was a normal directory before.
386 # The old path auditor would have rejected by mistake since it
386 # The old path auditor would have rejected by mistake since it
387 # panics when it sees sub/.hg/.
387 # panics when it sees sub/.hg/.
388 #
388 #
389 # All in all, checking against the working copy seems sensible
389 # All in all, checking against the working copy seems sensible
390 # since we want to prevent access to nested repositories on
390 # since we want to prevent access to nested repositories on
391 # the filesystem *now*.
391 # the filesystem *now*.
392 ctx = self[None]
392 ctx = self[None]
393 parts = util.splitpath(subpath)
393 parts = util.splitpath(subpath)
394 while parts:
394 while parts:
395 prefix = '/'.join(parts)
395 prefix = '/'.join(parts)
396 if prefix in ctx.substate:
396 if prefix in ctx.substate:
397 if prefix == normsubpath:
397 if prefix == normsubpath:
398 return True
398 return True
399 else:
399 else:
400 sub = ctx.sub(prefix)
400 sub = ctx.sub(prefix)
401 return sub.checknested(subpath[len(prefix) + 1:])
401 return sub.checknested(subpath[len(prefix) + 1:])
402 else:
402 else:
403 parts.pop()
403 parts.pop()
404 return False
404 return False
405
405
406 def peer(self):
406 def peer(self):
407 return localpeer(self) # not cached to avoid reference cycle
407 return localpeer(self) # not cached to avoid reference cycle
408
408
409 def unfiltered(self):
409 def unfiltered(self):
410 """Return unfiltered version of the repository
410 """Return unfiltered version of the repository
411
411
412 Intended to be overwritten by filtered repo."""
412 Intended to be overwritten by filtered repo."""
413 return self
413 return self
414
414
415 def filtered(self, name):
415 def filtered(self, name):
416 """Return a filtered version of a repository"""
416 """Return a filtered version of a repository"""
417 # build a new class with the mixin and the current class
417 # build a new class with the mixin and the current class
418 # (possibly subclass of the repo)
418 # (possibly subclass of the repo)
419 class proxycls(repoview.repoview, self.unfiltered().__class__):
419 class proxycls(repoview.repoview, self.unfiltered().__class__):
420 pass
420 pass
421 return proxycls(self, name)
421 return proxycls(self, name)
422
422
423 @repofilecache('bookmarks')
423 @repofilecache('bookmarks')
424 def _bookmarks(self):
424 def _bookmarks(self):
425 return bookmarks.bmstore(self)
425 return bookmarks.bmstore(self)
426
426
427 @repofilecache('bookmarks.current')
427 @repofilecache('bookmarks.current')
428 def _activebookmark(self):
428 def _activebookmark(self):
429 return bookmarks.readactive(self)
429 return bookmarks.readactive(self)
430
430
431 def bookmarkheads(self, bookmark):
431 def bookmarkheads(self, bookmark):
432 name = bookmark.split('@', 1)[0]
432 name = bookmark.split('@', 1)[0]
433 heads = []
433 heads = []
434 for mark, n in self._bookmarks.iteritems():
434 for mark, n in self._bookmarks.iteritems():
435 if mark.split('@', 1)[0] == name:
435 if mark.split('@', 1)[0] == name:
436 heads.append(n)
436 heads.append(n)
437 return heads
437 return heads
438
438
439 # _phaserevs and _phasesets depend on changelog. what we need is to
439 # _phaserevs and _phasesets depend on changelog. what we need is to
440 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
440 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
441 # can't be easily expressed in filecache mechanism.
441 # can't be easily expressed in filecache mechanism.
442 @storecache('phaseroots', '00changelog.i')
442 @storecache('phaseroots', '00changelog.i')
443 def _phasecache(self):
443 def _phasecache(self):
444 return phases.phasecache(self, self._phasedefaults)
444 return phases.phasecache(self, self._phasedefaults)
445
445
446 @storecache('obsstore')
446 @storecache('obsstore')
447 def obsstore(self):
447 def obsstore(self):
448 # read default format for new obsstore.
448 # read default format for new obsstore.
449 # developer config: format.obsstore-version
449 # developer config: format.obsstore-version
450 defaultformat = self.ui.configint('format', 'obsstore-version', None)
450 defaultformat = self.ui.configint('format', 'obsstore-version', None)
451 # rely on obsstore class default when possible.
451 # rely on obsstore class default when possible.
452 kwargs = {}
452 kwargs = {}
453 if defaultformat is not None:
453 if defaultformat is not None:
454 kwargs['defaultformat'] = defaultformat
454 kwargs['defaultformat'] = defaultformat
455 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
455 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
456 store = obsolete.obsstore(self.svfs, readonly=readonly,
456 store = obsolete.obsstore(self.svfs, readonly=readonly,
457 **kwargs)
457 **kwargs)
458 if store and readonly:
458 if store and readonly:
459 self.ui.warn(
459 self.ui.warn(
460 _('obsolete feature not enabled but %i markers found!\n')
460 _('obsolete feature not enabled but %i markers found!\n')
461 % len(list(store)))
461 % len(list(store)))
462 return store
462 return store
463
463
464 @storecache('00changelog.i')
464 @storecache('00changelog.i')
465 def changelog(self):
465 def changelog(self):
466 c = changelog.changelog(self.svfs)
466 c = changelog.changelog(self.svfs)
467 if 'HG_PENDING' in os.environ:
467 if 'HG_PENDING' in os.environ:
468 p = os.environ['HG_PENDING']
468 p = os.environ['HG_PENDING']
469 if p.startswith(self.root):
469 if p.startswith(self.root):
470 c.readpending('00changelog.i.a')
470 c.readpending('00changelog.i.a')
471 return c
471 return c
472
472
473 @storecache('00manifest.i')
473 @storecache('00manifest.i')
474 def manifest(self):
474 def manifest(self):
475 return manifest.manifest(self.svfs)
475 return manifest.manifest(self.svfs)
476
476
477 def dirlog(self, dir):
477 def dirlog(self, dir):
478 return self.manifest.dirlog(dir)
478 return self.manifest.dirlog(dir)
479
479
480 @repofilecache('dirstate')
480 @repofilecache('dirstate')
481 def dirstate(self):
481 def dirstate(self):
482 return dirstate.dirstate(self.vfs, self.ui, self.root,
482 return dirstate.dirstate(self.vfs, self.ui, self.root,
483 self._dirstatevalidate)
483 self._dirstatevalidate)
484
484
485 def _dirstatevalidate(self, node):
485 def _dirstatevalidate(self, node):
486 try:
486 try:
487 self.changelog.rev(node)
487 self.changelog.rev(node)
488 return node
488 return node
489 except error.LookupError:
489 except error.LookupError:
490 if not self._dirstatevalidatewarned:
490 if not self._dirstatevalidatewarned:
491 self._dirstatevalidatewarned = True
491 self._dirstatevalidatewarned = True
492 self.ui.warn(_("warning: ignoring unknown"
492 self.ui.warn(_("warning: ignoring unknown"
493 " working parent %s!\n") % short(node))
493 " working parent %s!\n") % short(node))
494 return nullid
494 return nullid
495
495
496 def __getitem__(self, changeid):
496 def __getitem__(self, changeid):
497 if changeid is None or changeid == wdirrev:
497 if changeid is None or changeid == wdirrev:
498 return context.workingctx(self)
498 return context.workingctx(self)
499 if isinstance(changeid, slice):
499 if isinstance(changeid, slice):
500 return [context.changectx(self, i)
500 return [context.changectx(self, i)
501 for i in xrange(*changeid.indices(len(self)))
501 for i in xrange(*changeid.indices(len(self)))
502 if i not in self.changelog.filteredrevs]
502 if i not in self.changelog.filteredrevs]
503 return context.changectx(self, changeid)
503 return context.changectx(self, changeid)
504
504
505 def __contains__(self, changeid):
505 def __contains__(self, changeid):
506 try:
506 try:
507 self[changeid]
507 self[changeid]
508 return True
508 return True
509 except error.RepoLookupError:
509 except error.RepoLookupError:
510 return False
510 return False
511
511
512 def __nonzero__(self):
512 def __nonzero__(self):
513 return True
513 return True
514
514
515 def __len__(self):
515 def __len__(self):
516 return len(self.changelog)
516 return len(self.changelog)
517
517
518 def __iter__(self):
518 def __iter__(self):
519 return iter(self.changelog)
519 return iter(self.changelog)
520
520
521 def revs(self, expr, *args):
521 def revs(self, expr, *args):
522 '''Find revisions matching a revset.
522 '''Find revisions matching a revset.
523
523
524 The revset is specified as a string ``expr`` that may contain
524 The revset is specified as a string ``expr`` that may contain
525 %-formatting to escape certain types. See ``revset.formatspec``.
525 %-formatting to escape certain types. See ``revset.formatspec``.
526
526
527 Return a revset.abstractsmartset, which is a list-like interface
527 Return a revset.abstractsmartset, which is a list-like interface
528 that contains integer revisions.
528 that contains integer revisions.
529 '''
529 '''
530 expr = revset.formatspec(expr, *args)
530 expr = revset.formatspec(expr, *args)
531 m = revset.match(None, expr)
531 m = revset.match(None, expr)
532 return m(self)
532 return m(self)
533
533
534 def set(self, expr, *args):
534 def set(self, expr, *args):
535 '''Find revisions matching a revset and emit changectx instances.
535 '''Find revisions matching a revset and emit changectx instances.
536
536
537 This is a convenience wrapper around ``revs()`` that iterates the
537 This is a convenience wrapper around ``revs()`` that iterates the
538 result and is a generator of changectx instances.
538 result and is a generator of changectx instances.
539 '''
539 '''
540 for r in self.revs(expr, *args):
540 for r in self.revs(expr, *args):
541 yield self[r]
541 yield self[r]
542
542
543 def url(self):
543 def url(self):
544 return 'file:' + self.root
544 return 'file:' + self.root
545
545
546 def hook(self, name, throw=False, **args):
546 def hook(self, name, throw=False, **args):
547 """Call a hook, passing this repo instance.
547 """Call a hook, passing this repo instance.
548
548
549 This a convenience method to aid invoking hooks. Extensions likely
549 This a convenience method to aid invoking hooks. Extensions likely
550 won't call this unless they have registered a custom hook or are
550 won't call this unless they have registered a custom hook or are
551 replacing code that is expected to call a hook.
551 replacing code that is expected to call a hook.
552 """
552 """
553 return hook.hook(self.ui, self, name, throw, **args)
553 return hook.hook(self.ui, self, name, throw, **args)
554
554
555 @unfilteredmethod
555 @unfilteredmethod
556 def _tag(self, names, node, message, local, user, date, extra=None,
556 def _tag(self, names, node, message, local, user, date, extra=None,
557 editor=False):
557 editor=False):
558 if isinstance(names, str):
558 if isinstance(names, str):
559 names = (names,)
559 names = (names,)
560
560
561 branches = self.branchmap()
561 branches = self.branchmap()
562 for name in names:
562 for name in names:
563 self.hook('pretag', throw=True, node=hex(node), tag=name,
563 self.hook('pretag', throw=True, node=hex(node), tag=name,
564 local=local)
564 local=local)
565 if name in branches:
565 if name in branches:
566 self.ui.warn(_("warning: tag %s conflicts with existing"
566 self.ui.warn(_("warning: tag %s conflicts with existing"
567 " branch name\n") % name)
567 " branch name\n") % name)
568
568
569 def writetags(fp, names, munge, prevtags):
569 def writetags(fp, names, munge, prevtags):
570 fp.seek(0, 2)
570 fp.seek(0, 2)
571 if prevtags and prevtags[-1] != '\n':
571 if prevtags and prevtags[-1] != '\n':
572 fp.write('\n')
572 fp.write('\n')
573 for name in names:
573 for name in names:
574 if munge:
574 if munge:
575 m = munge(name)
575 m = munge(name)
576 else:
576 else:
577 m = name
577 m = name
578
578
579 if (self._tagscache.tagtypes and
579 if (self._tagscache.tagtypes and
580 name in self._tagscache.tagtypes):
580 name in self._tagscache.tagtypes):
581 old = self.tags().get(name, nullid)
581 old = self.tags().get(name, nullid)
582 fp.write('%s %s\n' % (hex(old), m))
582 fp.write('%s %s\n' % (hex(old), m))
583 fp.write('%s %s\n' % (hex(node), m))
583 fp.write('%s %s\n' % (hex(node), m))
584 fp.close()
584 fp.close()
585
585
586 prevtags = ''
586 prevtags = ''
587 if local:
587 if local:
588 try:
588 try:
589 fp = self.vfs('localtags', 'r+')
589 fp = self.vfs('localtags', 'r+')
590 except IOError:
590 except IOError:
591 fp = self.vfs('localtags', 'a')
591 fp = self.vfs('localtags', 'a')
592 else:
592 else:
593 prevtags = fp.read()
593 prevtags = fp.read()
594
594
595 # local tags are stored in the current charset
595 # local tags are stored in the current charset
596 writetags(fp, names, None, prevtags)
596 writetags(fp, names, None, prevtags)
597 for name in names:
597 for name in names:
598 self.hook('tag', node=hex(node), tag=name, local=local)
598 self.hook('tag', node=hex(node), tag=name, local=local)
599 return
599 return
600
600
601 try:
601 try:
602 fp = self.wfile('.hgtags', 'rb+')
602 fp = self.wfile('.hgtags', 'rb+')
603 except IOError as e:
603 except IOError as e:
604 if e.errno != errno.ENOENT:
604 if e.errno != errno.ENOENT:
605 raise
605 raise
606 fp = self.wfile('.hgtags', 'ab')
606 fp = self.wfile('.hgtags', 'ab')
607 else:
607 else:
608 prevtags = fp.read()
608 prevtags = fp.read()
609
609
610 # committed tags are stored in UTF-8
610 # committed tags are stored in UTF-8
611 writetags(fp, names, encoding.fromlocal, prevtags)
611 writetags(fp, names, encoding.fromlocal, prevtags)
612
612
613 fp.close()
613 fp.close()
614
614
615 self.invalidatecaches()
615 self.invalidatecaches()
616
616
617 if '.hgtags' not in self.dirstate:
617 if '.hgtags' not in self.dirstate:
618 self[None].add(['.hgtags'])
618 self[None].add(['.hgtags'])
619
619
620 m = matchmod.exact(self.root, '', ['.hgtags'])
620 m = matchmod.exact(self.root, '', ['.hgtags'])
621 tagnode = self.commit(message, user, date, extra=extra, match=m,
621 tagnode = self.commit(message, user, date, extra=extra, match=m,
622 editor=editor)
622 editor=editor)
623
623
624 for name in names:
624 for name in names:
625 self.hook('tag', node=hex(node), tag=name, local=local)
625 self.hook('tag', node=hex(node), tag=name, local=local)
626
626
627 return tagnode
627 return tagnode
628
628
629 def tag(self, names, node, message, local, user, date, editor=False):
629 def tag(self, names, node, message, local, user, date, editor=False):
630 '''tag a revision with one or more symbolic names.
630 '''tag a revision with one or more symbolic names.
631
631
632 names is a list of strings or, when adding a single tag, names may be a
632 names is a list of strings or, when adding a single tag, names may be a
633 string.
633 string.
634
634
635 if local is True, the tags are stored in a per-repository file.
635 if local is True, the tags are stored in a per-repository file.
636 otherwise, they are stored in the .hgtags file, and a new
636 otherwise, they are stored in the .hgtags file, and a new
637 changeset is committed with the change.
637 changeset is committed with the change.
638
638
639 keyword arguments:
639 keyword arguments:
640
640
641 local: whether to store tags in non-version-controlled file
641 local: whether to store tags in non-version-controlled file
642 (default False)
642 (default False)
643
643
644 message: commit message to use if committing
644 message: commit message to use if committing
645
645
646 user: name of user to use if committing
646 user: name of user to use if committing
647
647
648 date: date tuple to use if committing'''
648 date: date tuple to use if committing'''
649
649
650 if not local:
650 if not local:
651 m = matchmod.exact(self.root, '', ['.hgtags'])
651 m = matchmod.exact(self.root, '', ['.hgtags'])
652 if any(self.status(match=m, unknown=True, ignored=True)):
652 if any(self.status(match=m, unknown=True, ignored=True)):
653 raise error.Abort(_('working copy of .hgtags is changed'),
653 raise error.Abort(_('working copy of .hgtags is changed'),
654 hint=_('please commit .hgtags manually'))
654 hint=_('please commit .hgtags manually'))
655
655
656 self.tags() # instantiate the cache
656 self.tags() # instantiate the cache
657 self._tag(names, node, message, local, user, date, editor=editor)
657 self._tag(names, node, message, local, user, date, editor=editor)
658
658
659 @filteredpropertycache
659 @filteredpropertycache
660 def _tagscache(self):
660 def _tagscache(self):
661 '''Returns a tagscache object that contains various tags related
661 '''Returns a tagscache object that contains various tags related
662 caches.'''
662 caches.'''
663
663
664 # This simplifies its cache management by having one decorated
664 # This simplifies its cache management by having one decorated
665 # function (this one) and the rest simply fetch things from it.
665 # function (this one) and the rest simply fetch things from it.
666 class tagscache(object):
666 class tagscache(object):
667 def __init__(self):
667 def __init__(self):
668 # These two define the set of tags for this repository. tags
668 # These two define the set of tags for this repository. tags
669 # maps tag name to node; tagtypes maps tag name to 'global' or
669 # maps tag name to node; tagtypes maps tag name to 'global' or
670 # 'local'. (Global tags are defined by .hgtags across all
670 # 'local'. (Global tags are defined by .hgtags across all
671 # heads, and local tags are defined in .hg/localtags.)
671 # heads, and local tags are defined in .hg/localtags.)
672 # They constitute the in-memory cache of tags.
672 # They constitute the in-memory cache of tags.
673 self.tags = self.tagtypes = None
673 self.tags = self.tagtypes = None
674
674
675 self.nodetagscache = self.tagslist = None
675 self.nodetagscache = self.tagslist = None
676
676
677 cache = tagscache()
677 cache = tagscache()
678 cache.tags, cache.tagtypes = self._findtags()
678 cache.tags, cache.tagtypes = self._findtags()
679
679
680 return cache
680 return cache
681
681
682 def tags(self):
682 def tags(self):
683 '''return a mapping of tag to node'''
683 '''return a mapping of tag to node'''
684 t = {}
684 t = {}
685 if self.changelog.filteredrevs:
685 if self.changelog.filteredrevs:
686 tags, tt = self._findtags()
686 tags, tt = self._findtags()
687 else:
687 else:
688 tags = self._tagscache.tags
688 tags = self._tagscache.tags
689 for k, v in tags.iteritems():
689 for k, v in tags.iteritems():
690 try:
690 try:
691 # ignore tags to unknown nodes
691 # ignore tags to unknown nodes
692 self.changelog.rev(v)
692 self.changelog.rev(v)
693 t[k] = v
693 t[k] = v
694 except (error.LookupError, ValueError):
694 except (error.LookupError, ValueError):
695 pass
695 pass
696 return t
696 return t
697
697
698 def _findtags(self):
698 def _findtags(self):
699 '''Do the hard work of finding tags. Return a pair of dicts
699 '''Do the hard work of finding tags. Return a pair of dicts
700 (tags, tagtypes) where tags maps tag name to node, and tagtypes
700 (tags, tagtypes) where tags maps tag name to node, and tagtypes
701 maps tag name to a string like \'global\' or \'local\'.
701 maps tag name to a string like \'global\' or \'local\'.
702 Subclasses or extensions are free to add their own tags, but
702 Subclasses or extensions are free to add their own tags, but
703 should be aware that the returned dicts will be retained for the
703 should be aware that the returned dicts will be retained for the
704 duration of the localrepo object.'''
704 duration of the localrepo object.'''
705
705
706 # XXX what tagtype should subclasses/extensions use? Currently
706 # XXX what tagtype should subclasses/extensions use? Currently
707 # mq and bookmarks add tags, but do not set the tagtype at all.
707 # mq and bookmarks add tags, but do not set the tagtype at all.
708 # Should each extension invent its own tag type? Should there
708 # Should each extension invent its own tag type? Should there
709 # be one tagtype for all such "virtual" tags? Or is the status
709 # be one tagtype for all such "virtual" tags? Or is the status
710 # quo fine?
710 # quo fine?
711
711
712 alltags = {} # map tag name to (node, hist)
712 alltags = {} # map tag name to (node, hist)
713 tagtypes = {}
713 tagtypes = {}
714
714
715 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
715 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
716 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
716 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
717
717
718 # Build the return dicts. Have to re-encode tag names because
718 # Build the return dicts. Have to re-encode tag names because
719 # the tags module always uses UTF-8 (in order not to lose info
719 # the tags module always uses UTF-8 (in order not to lose info
720 # writing to the cache), but the rest of Mercurial wants them in
720 # writing to the cache), but the rest of Mercurial wants them in
721 # local encoding.
721 # local encoding.
722 tags = {}
722 tags = {}
723 for (name, (node, hist)) in alltags.iteritems():
723 for (name, (node, hist)) in alltags.iteritems():
724 if node != nullid:
724 if node != nullid:
725 tags[encoding.tolocal(name)] = node
725 tags[encoding.tolocal(name)] = node
726 tags['tip'] = self.changelog.tip()
726 tags['tip'] = self.changelog.tip()
727 tagtypes = dict([(encoding.tolocal(name), value)
727 tagtypes = dict([(encoding.tolocal(name), value)
728 for (name, value) in tagtypes.iteritems()])
728 for (name, value) in tagtypes.iteritems()])
729 return (tags, tagtypes)
729 return (tags, tagtypes)
730
730
731 def tagtype(self, tagname):
731 def tagtype(self, tagname):
732 '''
732 '''
733 return the type of the given tag. result can be:
733 return the type of the given tag. result can be:
734
734
735 'local' : a local tag
735 'local' : a local tag
736 'global' : a global tag
736 'global' : a global tag
737 None : tag does not exist
737 None : tag does not exist
738 '''
738 '''
739
739
740 return self._tagscache.tagtypes.get(tagname)
740 return self._tagscache.tagtypes.get(tagname)
741
741
742 def tagslist(self):
742 def tagslist(self):
743 '''return a list of tags ordered by revision'''
743 '''return a list of tags ordered by revision'''
744 if not self._tagscache.tagslist:
744 if not self._tagscache.tagslist:
745 l = []
745 l = []
746 for t, n in self.tags().iteritems():
746 for t, n in self.tags().iteritems():
747 l.append((self.changelog.rev(n), t, n))
747 l.append((self.changelog.rev(n), t, n))
748 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
748 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
749
749
750 return self._tagscache.tagslist
750 return self._tagscache.tagslist
751
751
752 def nodetags(self, node):
752 def nodetags(self, node):
753 '''return the tags associated with a node'''
753 '''return the tags associated with a node'''
754 if not self._tagscache.nodetagscache:
754 if not self._tagscache.nodetagscache:
755 nodetagscache = {}
755 nodetagscache = {}
756 for t, n in self._tagscache.tags.iteritems():
756 for t, n in self._tagscache.tags.iteritems():
757 nodetagscache.setdefault(n, []).append(t)
757 nodetagscache.setdefault(n, []).append(t)
758 for tags in nodetagscache.itervalues():
758 for tags in nodetagscache.itervalues():
759 tags.sort()
759 tags.sort()
760 self._tagscache.nodetagscache = nodetagscache
760 self._tagscache.nodetagscache = nodetagscache
761 return self._tagscache.nodetagscache.get(node, [])
761 return self._tagscache.nodetagscache.get(node, [])
762
762
763 def nodebookmarks(self, node):
763 def nodebookmarks(self, node):
764 """return the list of bookmarks pointing to the specified node"""
764 """return the list of bookmarks pointing to the specified node"""
765 marks = []
765 marks = []
766 for bookmark, n in self._bookmarks.iteritems():
766 for bookmark, n in self._bookmarks.iteritems():
767 if n == node:
767 if n == node:
768 marks.append(bookmark)
768 marks.append(bookmark)
769 return sorted(marks)
769 return sorted(marks)
770
770
771 def branchmap(self):
771 def branchmap(self):
772 '''returns a dictionary {branch: [branchheads]} with branchheads
772 '''returns a dictionary {branch: [branchheads]} with branchheads
773 ordered by increasing revision number'''
773 ordered by increasing revision number'''
774 branchmap.updatecache(self)
774 branchmap.updatecache(self)
775 return self._branchcaches[self.filtername]
775 return self._branchcaches[self.filtername]
776
776
777 @unfilteredmethod
777 @unfilteredmethod
778 def revbranchcache(self):
778 def revbranchcache(self):
779 if not self._revbranchcache:
779 if not self._revbranchcache:
780 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
780 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
781 return self._revbranchcache
781 return self._revbranchcache
782
782
783 def branchtip(self, branch, ignoremissing=False):
783 def branchtip(self, branch, ignoremissing=False):
784 '''return the tip node for a given branch
784 '''return the tip node for a given branch
785
785
786 If ignoremissing is True, then this method will not raise an error.
786 If ignoremissing is True, then this method will not raise an error.
787 This is helpful for callers that only expect None for a missing branch
787 This is helpful for callers that only expect None for a missing branch
788 (e.g. namespace).
788 (e.g. namespace).
789
789
790 '''
790 '''
791 try:
791 try:
792 return self.branchmap().branchtip(branch)
792 return self.branchmap().branchtip(branch)
793 except KeyError:
793 except KeyError:
794 if not ignoremissing:
794 if not ignoremissing:
795 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
795 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
796 else:
796 else:
797 pass
797 pass
798
798
799 def lookup(self, key):
799 def lookup(self, key):
800 return self[key].node()
800 return self[key].node()
801
801
802 def lookupbranch(self, key, remote=None):
802 def lookupbranch(self, key, remote=None):
803 repo = remote or self
803 repo = remote or self
804 if key in repo.branchmap():
804 if key in repo.branchmap():
805 return key
805 return key
806
806
807 repo = (remote and remote.local()) and remote or self
807 repo = (remote and remote.local()) and remote or self
808 return repo[key].branch()
808 return repo[key].branch()
809
809
810 def known(self, nodes):
810 def known(self, nodes):
811 nm = self.changelog.nodemap
811 cl = self.changelog
812 pc = self._phasecache
812 nm = cl.nodemap
813 filtered = cl.filteredrevs
813 result = []
814 result = []
814 for n in nodes:
815 for n in nodes:
815 r = nm.get(n)
816 r = nm.get(n)
816 resp = not (r is None or pc.phase(self, r) >= phases.secret)
817 resp = not (r is None or r in filtered)
817 result.append(resp)
818 result.append(resp)
818 return result
819 return result
819
820
820 def local(self):
821 def local(self):
821 return self
822 return self
822
823
823 def publishing(self):
824 def publishing(self):
824 # it's safe (and desirable) to trust the publish flag unconditionally
825 # it's safe (and desirable) to trust the publish flag unconditionally
825 # so that we don't finalize changes shared between users via ssh or nfs
826 # so that we don't finalize changes shared between users via ssh or nfs
826 return self.ui.configbool('phases', 'publish', True, untrusted=True)
827 return self.ui.configbool('phases', 'publish', True, untrusted=True)
827
828
828 def cancopy(self):
829 def cancopy(self):
829 # so statichttprepo's override of local() works
830 # so statichttprepo's override of local() works
830 if not self.local():
831 if not self.local():
831 return False
832 return False
832 if not self.publishing():
833 if not self.publishing():
833 return True
834 return True
834 # if publishing we can't copy if there is filtered content
835 # if publishing we can't copy if there is filtered content
835 return not self.filtered('visible').changelog.filteredrevs
836 return not self.filtered('visible').changelog.filteredrevs
836
837
837 def shared(self):
838 def shared(self):
838 '''the type of shared repository (None if not shared)'''
839 '''the type of shared repository (None if not shared)'''
839 if self.sharedpath != self.path:
840 if self.sharedpath != self.path:
840 return 'store'
841 return 'store'
841 return None
842 return None
842
843
843 def join(self, f, *insidef):
844 def join(self, f, *insidef):
844 return self.vfs.join(os.path.join(f, *insidef))
845 return self.vfs.join(os.path.join(f, *insidef))
845
846
846 def wjoin(self, f, *insidef):
847 def wjoin(self, f, *insidef):
847 return self.vfs.reljoin(self.root, f, *insidef)
848 return self.vfs.reljoin(self.root, f, *insidef)
848
849
849 def file(self, f):
850 def file(self, f):
850 if f[0] == '/':
851 if f[0] == '/':
851 f = f[1:]
852 f = f[1:]
852 return filelog.filelog(self.svfs, f)
853 return filelog.filelog(self.svfs, f)
853
854
854 def parents(self, changeid=None):
855 def parents(self, changeid=None):
855 '''get list of changectxs for parents of changeid'''
856 '''get list of changectxs for parents of changeid'''
856 msg = 'repo.parents() is deprecated, use repo[%r].parents()' % changeid
857 msg = 'repo.parents() is deprecated, use repo[%r].parents()' % changeid
857 self.ui.deprecwarn(msg, '3.7')
858 self.ui.deprecwarn(msg, '3.7')
858 return self[changeid].parents()
859 return self[changeid].parents()
859
860
860 def changectx(self, changeid):
861 def changectx(self, changeid):
861 return self[changeid]
862 return self[changeid]
862
863
863 def setparents(self, p1, p2=nullid):
864 def setparents(self, p1, p2=nullid):
864 self.dirstate.beginparentchange()
865 self.dirstate.beginparentchange()
865 copies = self.dirstate.setparents(p1, p2)
866 copies = self.dirstate.setparents(p1, p2)
866 pctx = self[p1]
867 pctx = self[p1]
867 if copies:
868 if copies:
868 # Adjust copy records, the dirstate cannot do it, it
869 # Adjust copy records, the dirstate cannot do it, it
869 # requires access to parents manifests. Preserve them
870 # requires access to parents manifests. Preserve them
870 # only for entries added to first parent.
871 # only for entries added to first parent.
871 for f in copies:
872 for f in copies:
872 if f not in pctx and copies[f] in pctx:
873 if f not in pctx and copies[f] in pctx:
873 self.dirstate.copy(copies[f], f)
874 self.dirstate.copy(copies[f], f)
874 if p2 == nullid:
875 if p2 == nullid:
875 for f, s in sorted(self.dirstate.copies().items()):
876 for f, s in sorted(self.dirstate.copies().items()):
876 if f not in pctx and s not in pctx:
877 if f not in pctx and s not in pctx:
877 self.dirstate.copy(None, f)
878 self.dirstate.copy(None, f)
878 self.dirstate.endparentchange()
879 self.dirstate.endparentchange()
879
880
880 def filectx(self, path, changeid=None, fileid=None):
881 def filectx(self, path, changeid=None, fileid=None):
881 """changeid can be a changeset revision, node, or tag.
882 """changeid can be a changeset revision, node, or tag.
882 fileid can be a file revision or node."""
883 fileid can be a file revision or node."""
883 return context.filectx(self, path, changeid, fileid)
884 return context.filectx(self, path, changeid, fileid)
884
885
885 def getcwd(self):
886 def getcwd(self):
886 return self.dirstate.getcwd()
887 return self.dirstate.getcwd()
887
888
888 def pathto(self, f, cwd=None):
889 def pathto(self, f, cwd=None):
889 return self.dirstate.pathto(f, cwd)
890 return self.dirstate.pathto(f, cwd)
890
891
891 def wfile(self, f, mode='r'):
892 def wfile(self, f, mode='r'):
892 return self.wvfs(f, mode)
893 return self.wvfs(f, mode)
893
894
894 def _link(self, f):
895 def _link(self, f):
895 return self.wvfs.islink(f)
896 return self.wvfs.islink(f)
896
897
897 def _loadfilter(self, filter):
898 def _loadfilter(self, filter):
898 if filter not in self.filterpats:
899 if filter not in self.filterpats:
899 l = []
900 l = []
900 for pat, cmd in self.ui.configitems(filter):
901 for pat, cmd in self.ui.configitems(filter):
901 if cmd == '!':
902 if cmd == '!':
902 continue
903 continue
903 mf = matchmod.match(self.root, '', [pat])
904 mf = matchmod.match(self.root, '', [pat])
904 fn = None
905 fn = None
905 params = cmd
906 params = cmd
906 for name, filterfn in self._datafilters.iteritems():
907 for name, filterfn in self._datafilters.iteritems():
907 if cmd.startswith(name):
908 if cmd.startswith(name):
908 fn = filterfn
909 fn = filterfn
909 params = cmd[len(name):].lstrip()
910 params = cmd[len(name):].lstrip()
910 break
911 break
911 if not fn:
912 if not fn:
912 fn = lambda s, c, **kwargs: util.filter(s, c)
913 fn = lambda s, c, **kwargs: util.filter(s, c)
913 # Wrap old filters not supporting keyword arguments
914 # Wrap old filters not supporting keyword arguments
914 if not inspect.getargspec(fn)[2]:
915 if not inspect.getargspec(fn)[2]:
915 oldfn = fn
916 oldfn = fn
916 fn = lambda s, c, **kwargs: oldfn(s, c)
917 fn = lambda s, c, **kwargs: oldfn(s, c)
917 l.append((mf, fn, params))
918 l.append((mf, fn, params))
918 self.filterpats[filter] = l
919 self.filterpats[filter] = l
919 return self.filterpats[filter]
920 return self.filterpats[filter]
920
921
921 def _filter(self, filterpats, filename, data):
922 def _filter(self, filterpats, filename, data):
922 for mf, fn, cmd in filterpats:
923 for mf, fn, cmd in filterpats:
923 if mf(filename):
924 if mf(filename):
924 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
925 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
925 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
926 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
926 break
927 break
927
928
928 return data
929 return data
929
930
930 @unfilteredpropertycache
931 @unfilteredpropertycache
931 def _encodefilterpats(self):
932 def _encodefilterpats(self):
932 return self._loadfilter('encode')
933 return self._loadfilter('encode')
933
934
934 @unfilteredpropertycache
935 @unfilteredpropertycache
935 def _decodefilterpats(self):
936 def _decodefilterpats(self):
936 return self._loadfilter('decode')
937 return self._loadfilter('decode')
937
938
938 def adddatafilter(self, name, filter):
939 def adddatafilter(self, name, filter):
939 self._datafilters[name] = filter
940 self._datafilters[name] = filter
940
941
941 def wread(self, filename):
942 def wread(self, filename):
942 if self._link(filename):
943 if self._link(filename):
943 data = self.wvfs.readlink(filename)
944 data = self.wvfs.readlink(filename)
944 else:
945 else:
945 data = self.wvfs.read(filename)
946 data = self.wvfs.read(filename)
946 return self._filter(self._encodefilterpats, filename, data)
947 return self._filter(self._encodefilterpats, filename, data)
947
948
948 def wwrite(self, filename, data, flags):
949 def wwrite(self, filename, data, flags):
949 """write ``data`` into ``filename`` in the working directory
950 """write ``data`` into ``filename`` in the working directory
950
951
951 This returns length of written (maybe decoded) data.
952 This returns length of written (maybe decoded) data.
952 """
953 """
953 data = self._filter(self._decodefilterpats, filename, data)
954 data = self._filter(self._decodefilterpats, filename, data)
954 if 'l' in flags:
955 if 'l' in flags:
955 self.wvfs.symlink(data, filename)
956 self.wvfs.symlink(data, filename)
956 else:
957 else:
957 self.wvfs.write(filename, data)
958 self.wvfs.write(filename, data)
958 if 'x' in flags:
959 if 'x' in flags:
959 self.wvfs.setflags(filename, False, True)
960 self.wvfs.setflags(filename, False, True)
960 return len(data)
961 return len(data)
961
962
962 def wwritedata(self, filename, data):
963 def wwritedata(self, filename, data):
963 return self._filter(self._decodefilterpats, filename, data)
964 return self._filter(self._decodefilterpats, filename, data)
964
965
965 def currenttransaction(self):
966 def currenttransaction(self):
966 """return the current transaction or None if non exists"""
967 """return the current transaction or None if non exists"""
967 if self._transref:
968 if self._transref:
968 tr = self._transref()
969 tr = self._transref()
969 else:
970 else:
970 tr = None
971 tr = None
971
972
972 if tr and tr.running():
973 if tr and tr.running():
973 return tr
974 return tr
974 return None
975 return None
975
976
def transaction(self, desc, report=None):
    """Open a new transaction on the repository store and return it.

    ``desc`` is a short text describing the transaction (passed to the
    journal and to hooks); ``report`` is an optional callable used to
    report rollback messages, defaulting to ``ui.warn``.  If a
    transaction is already running, a nested transaction is returned
    instead of a new one.

    Raises error.RepoError when an abandoned journal already exists.
    """
    if (self.ui.configbool('devel', 'all-warnings')
            or self.ui.configbool('devel', 'check-locks')):
        l = self._lockref and self._lockref()
        if l is None or not l.held:
            self.ui.develwarn('transaction with no lock')
    tr = self.currenttransaction()
    if tr is not None:
        # a transaction is already running; join it by nesting
        return tr.nest()

    # abort here if the journal already exists
    if self.svfs.exists("journal"):
        raise error.RepoError(
            _("abandoned transaction found"),
            hint=_("run 'hg recover' to clean up transaction"))

    # make journal.dirstate contain in-memory changes at this point
    self.dirstate.write(None)

    # unique id for this transaction, passed to hooks as txnid
    idbase = "%.40f#%f" % (random.random(), time.time())
    txnid = 'TXN:' + util.sha1(idbase).hexdigest()
    self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)

    self._writejournal(desc)
    renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
    if report:
        rp = report
    else:
        rp = self.ui.warn
    vfsmap = {'plain': self.vfs} # root of .hg/
    # we must avoid cyclic reference between repo and transaction.
    reporef = weakref.ref(self)
    def validate(tr):
        """will run pre-closing hooks"""
        reporef().hook('pretxnclose', throw=True,
                       txnname=desc, **tr.hookargs)
    def releasefn(tr, success):
        repo = reporef()
        if success:
            # this should be explicitly invoked here, because
            # in-memory changes aren't written out at closing
            # transaction, if tr.addfilegenerator (via
            # dirstate.write or so) isn't invoked while
            # transaction running
            repo.dirstate.write(None)
        else:
            # prevent in-memory changes from being written out at
            # the end of outer wlock scope or so
            repo.dirstate.invalidate()

            # discard all changes (including ones already written
            # out) in this transaction
            repo.vfs.rename('journal.dirstate', 'dirstate')

            repo.invalidate(clearfilecache=True)

    tr = transaction.transaction(rp, self.svfs, vfsmap,
                                 "journal",
                                 "undo",
                                 aftertrans(renames),
                                 self.store.createmode,
                                 validator=validate,
                                 releasefn=releasefn)

    tr.hookargs['txnid'] = txnid
    # note: writing the fncache only during finalize mean that the file is
    # outdated when running hooks. As fncache is used for streaming clone,
    # this is not expected to break anything that happen during the hooks.
    tr.addfinalize('flush-fncache', self.store.write)
    def txnclosehook(tr2):
        """To be run if transaction is successful, will schedule a hook run
        """
        def hook():
            reporef().hook('txnclose', throw=False, txnname=desc,
                           **tr2.hookargs)
        reporef()._afterlock(hook)
    tr.addfinalize('txnclose-hook', txnclosehook)
    def txnaborthook(tr2):
        """To be run if transaction is aborted
        """
        reporef().hook('txnabort', throw=False, txnname=desc,
                       **tr2.hookargs)
    tr.addabort('txnabort-hook', txnaborthook)
    # avoid eager cache invalidation. in-memory data should be identical
    # to stored data if transaction has no error.
    tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
    self._transref = weakref.ref(tr)
    return tr
1064
1065
1065 def _journalfiles(self):
1066 def _journalfiles(self):
1066 return ((self.svfs, 'journal'),
1067 return ((self.svfs, 'journal'),
1067 (self.vfs, 'journal.dirstate'),
1068 (self.vfs, 'journal.dirstate'),
1068 (self.vfs, 'journal.branch'),
1069 (self.vfs, 'journal.branch'),
1069 (self.vfs, 'journal.desc'),
1070 (self.vfs, 'journal.desc'),
1070 (self.vfs, 'journal.bookmarks'),
1071 (self.vfs, 'journal.bookmarks'),
1071 (self.svfs, 'journal.phaseroots'))
1072 (self.svfs, 'journal.phaseroots'))
1072
1073
def undofiles(self):
    """Return (vfs, name) pairs for the undo files matching the
    journal files."""
    files = []
    for vfs, name in self._journalfiles():
        files.append((vfs, undoname(name)))
    return files
1075
1076
def _writejournal(self, desc):
    """Snapshot state files into journal.* before a transaction runs.

    ``desc`` is the transaction description recorded in journal.desc
    together with the current changelog length.  The journal copies are
    what rollback/recover restore from.
    """
    # missing source files are treated as empty via tryread()
    self.vfs.write("journal.dirstate",
                   self.vfs.tryread("dirstate"))
    self.vfs.write("journal.branch",
                   encoding.fromlocal(self.dirstate.branch()))
    self.vfs.write("journal.desc",
                   "%d\n%s\n" % (len(self), desc))
    self.vfs.write("journal.bookmarks",
                   self.vfs.tryread("bookmarks"))
    # phaseroots lives in the store, hence svfs
    self.svfs.write("journal.phaseroots",
                    self.svfs.tryread("phaseroots"))
1087
1088
def recover(self):
    """Roll back an interrupted transaction left in the store journal.

    Returns True when a journal was found and rolled back, False when
    there was nothing to recover.
    """
    lk = self.lock()
    try:
        if not self.svfs.exists("journal"):
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
        self.ui.status(_("rolling back interrupted transaction\n"))
        vfsmap = {'': self.svfs,
                  'plain': self.vfs}
        transaction.rollback(self.svfs, vfsmap, "journal",
                             self.ui.warn)
        self.invalidate()
        return True
    finally:
        lk.release()
1104
1105
def rollback(self, dryrun=False, force=False):
    """Undo the effects of the last transaction recorded in the undo
    files.

    Returns 0 on success and 1 when no rollback information exists.
    """
    wlock = lock = dsguard = None
    try:
        wlock = self.wlock()
        lock = self.lock()
        if not self.svfs.exists("undo"):
            self.ui.warn(_("no rollback information available\n"))
            return 1
        # protect the dirstate while _rollback rewrites it
        dsguard = cmdutil.dirstateguard(self, 'rollback')
        return self._rollback(dryrun, force, dsguard)
    finally:
        release(dsguard, lock, wlock)
1119
1120
@unfilteredmethod # Until we get smarter cache management
def _rollback(self, dryrun, force, dsguard):
    """Restore the repository to the state saved in the undo files.

    ``dryrun``: only report what would be rolled back; ``force``: skip
    the safety check refusing to roll back a commit while not checked
    out on tip; ``dsguard``: dirstateguard protecting the dirstate
    (closed here when the dirstate parents were stripped and the saved
    dirstate was restored instead).  Returns 0, including on dry-run.

    Raises error.Abort when rolling back a commit would lose data and
    ``force`` is not set.
    """
    ui = self.ui
    try:
        args = self.vfs.read('undo.desc').splitlines()
        (oldlen, desc, detail) = (int(args[0]), args[1], None)
        if len(args) >= 3:
            detail = args[2]
        oldtip = oldlen - 1

        if detail and ui.verbose:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s: %s)\n')
                   % (oldtip, desc, detail))
        else:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s)\n')
                   % (oldtip, desc))
    except IOError:
        # undo.desc missing or unreadable: transaction of unknown origin
        msg = _('rolling back unknown transaction\n')
        desc = None

    if not force and self['.'] != self['tip'] and desc == 'commit':
        raise error.Abort(
            _('rollback of last commit while not checked out '
              'may lose data'), hint=_('use -f to force'))

    ui.status(msg)
    if dryrun:
        return 0

    parents = self.dirstate.parents()
    self.destroying()
    vfsmap = {'plain': self.vfs, '': self.svfs}
    transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
    if self.vfs.exists('undo.bookmarks'):
        self.vfs.rename('undo.bookmarks', 'bookmarks')
    if self.svfs.exists('undo.phaseroots'):
        self.svfs.rename('undo.phaseroots', 'phaseroots')
    self.invalidate()

    # if the rollback stripped the dirstate parents, the saved dirstate
    # must be restored as well
    parentgone = (parents[0] not in self.changelog.nodemap or
                  parents[1] not in self.changelog.nodemap)
    if parentgone:
        # prevent dirstateguard from overwriting already restored one
        dsguard.close()

        self.vfs.rename('undo.dirstate', 'dirstate')
        try:
            branch = self.vfs.read('undo.branch')
            self.dirstate.setbranch(encoding.tolocal(branch))
        except IOError:
            ui.warn(_('named branch could not be reset: '
                      'current branch is still \'%s\'\n')
                    % self.dirstate.branch())

        self.dirstate.invalidate()
        parents = tuple([p.rev() for p in self[None].parents()])
        if len(parents) > 1:
            ui.status(_('working directory now based on '
                        'revisions %d and %d\n') % parents)
        else:
            ui.status(_('working directory now based on '
                        'revision %d\n') % parents)
        mergemod.mergestate.clean(self, self['.'].node())

    # TODO: if we know which new heads may result from this rollback, pass
    # them to destroy(), which will prevent the branchhead cache from being
    # invalidated.
    self.destroyed()
    return 0
1191
1192
def invalidatecaches(self):
    """Forget the in-memory caches derived from repository history."""
    # delattr does not work on the filtered-view proxy, so remove the
    # cached tags entry straight from the instance dict (no-op when it
    # was never computed)
    self.__dict__.pop('_tagscache', None)

    self.unfiltered()._branchcaches.clear()
    self.invalidatevolatilesets()
1200
1201
def invalidatevolatilesets(self):
    """Drop caches that depend on repo filtering and obsolescence
    state (filtered revision sets and obsolete-marker caches)."""
    self.filteredrevcache.clear()
    obsolete.clearobscaches(self)
1204
1205
def invalidatedirstate(self):
    '''Drop the cached dirstate so the next access re-validates it.

    Unlike dirstate.invalidate(), which always forces a reread, this
    only discards the cached property: the dirstate file is reloaded
    lazily, and only if it was modified since it was last read.  Use
    dirstate.invalidate() to explicitly restore a previous known good
    state.'''
    if not hasunfilteredcache(self, 'dirstate'):
        # never loaded: nothing to drop
        return
    for name in self.dirstate._filecache:
        try:
            delattr(self.dirstate, name)
        except AttributeError:
            pass
    delattr(self.unfiltered(), 'dirstate')
1221
1222
def invalidate(self, clearfilecache=False):
    """Flush the file-backed caches (except dirstate) from memory.

    With ``clearfilecache=True`` the filecache bookkeeping entries are
    dropped as well, forcing a full reload instead of a stat check.
    """
    unfiltered = self.unfiltered()  # all file caches are stored unfiltered
    for name in list(self._filecache.keys()):
        # dirstate is invalidated separately in invalidatedirstate()
        if name == 'dirstate':
            continue
        if clearfilecache:
            del self._filecache[name]
        try:
            delattr(unfiltered, name)
        except AttributeError:
            # cache entry never materialized on the instance
            pass
    self.invalidatecaches()
    self.store.invalidatecaches()
1237
1238
def invalidateall(self):
    '''Fully invalidate both the store and the non-store parts so any
    subsequent operation rereads outside changes.'''
    # extensions should hook invalidate()/invalidatedirstate() to
    # flush their own caches
    self.invalidate()
    self.invalidatedirstate()
1244
1245
1245 def _refreshfilecachestats(self, tr):
1246 def _refreshfilecachestats(self, tr):
1246 """Reload stats of cached files so that they are flagged as valid"""
1247 """Reload stats of cached files so that they are flagged as valid"""
1247 for k, ce in self._filecache.items():
1248 for k, ce in self._filecache.items():
1248 if k == 'dirstate' or k not in self.__dict__:
1249 if k == 'dirstate' or k not in self.__dict__:
1249 continue
1250 continue
1250 ce.refresh()
1251 ce.refresh()
1251
1252
def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
          inheritchecker=None, parentenvvar=None):
    """Acquire the lock file *lockname* on *vfs* and return it.

    First attempts a non-blocking acquisition; when the lock is busy
    and ``wait`` is true, warns the user and retries with the timeout
    from the 'ui.timeout' config (default 600 seconds).  The remaining
    arguments are forwarded to lockmod.lock.

    Raises error.LockHeld when the lock is busy and ``wait`` is false.
    """
    parentlock = None
    # the contents of parentenvvar are used by the underlying lock to
    # determine whether it can be inherited
    if parentenvvar is not None:
        parentlock = os.environ.get(parentenvvar)
    try:
        # timeout 0: fail immediately if someone else holds the lock
        l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
                         acquirefn=acquirefn, desc=desc,
                         inheritchecker=inheritchecker,
                         parentlock=parentlock)
    except error.LockHeld as inst:
        if not wait:
            raise
        self.ui.warn(_("waiting for lock on %s held by %r\n") %
                     (desc, inst.locker))
        # default to 600 seconds timeout
        l = lockmod.lock(vfs, lockname,
                         int(self.ui.config("ui", "timeout", "600")),
                         releasefn=releasefn, acquirefn=acquirefn,
                         desc=desc)
        self.ui.warn(_("got lock after %s seconds\n") % l.delay)
    return l
1276
1277
1277 def _afterlock(self, callback):
1278 def _afterlock(self, callback):
1278 """add a callback to be run when the repository is fully unlocked
1279 """add a callback to be run when the repository is fully unlocked
1279
1280
1280 The callback will be executed when the outermost lock is released
1281 The callback will be executed when the outermost lock is released
1281 (with wlock being higher level than 'lock')."""
1282 (with wlock being higher level than 'lock')."""
1282 for ref in (self._wlockref, self._lockref):
1283 for ref in (self._wlockref, self._lockref):
1283 l = ref and ref()
1284 l = ref and ref()
1284 if l and l.held:
1285 if l and l.held:
1285 l.postrelease.append(callback)
1286 l.postrelease.append(callback)
1286 break
1287 break
1287 else: # no lock have been found.
1288 else: # no lock have been found.
1288 callback()
1289 callback()
1289
1290
def lock(self, wait=True):
    '''Lock the repository store (.hg/store) and return a weak
    reference to the lock.  Take this before modifying the store
    (e.g. committing or stripping); when opening a transaction, take
    it as well.

    If both 'lock' and 'wlock' must be acquired, always acquire
    'wlock' first to avoid a dead-lock hazard.'''
    existing = self._lockref and self._lockref()
    if existing is not None and existing.held:
        # re-enter the already-held lock
        existing.lock()
        return existing

    newlock = self._lock(self.svfs, "lock", wait, None,
                         self.invalidate, _('repository %s') % self.origroot)
    self._lockref = weakref.ref(newlock)
    return newlock
1306
1307
1307 def _wlockchecktransaction(self):
1308 def _wlockchecktransaction(self):
1308 if self.currenttransaction() is not None:
1309 if self.currenttransaction() is not None:
1309 raise error.LockInheritanceContractViolation(
1310 raise error.LockInheritanceContractViolation(
1310 'wlock cannot be inherited in the middle of a transaction')
1311 'wlock cannot be inherited in the middle of a transaction')
1311
1312
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.

    Use this before modifying files in .hg.

    If both 'lock' and 'wlock' must be acquired, ensure you always acquires
    'wlock' first to avoid a dead-lock hazard.'''
    # re-enter an already-held wlock
    l = self._wlockref and self._wlockref()
    if l is not None and l.held:
        l.lock()
        return l

    # We do not need to check for non-waiting lock acquisition. Such
    # acquisition would not cause dead-lock as they would just fail.
    if wait and (self.ui.configbool('devel', 'all-warnings')
                 or self.ui.configbool('devel', 'check-locks')):
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            # lock-ordering violation: see docstring above
            self.ui.develwarn('"wlock" acquired after "lock"')

    def unlock():
        # release callback: flush (or discard, when a parent change is
        # pending) dirstate changes, then re-stat its filecache entry
        # so it is considered valid
        if self.dirstate.pendingparentchange():
            self.dirstate.invalidate()
        else:
            self.dirstate.write(None)

        self._filecache['dirstate'].refresh()

    l = self._lock(self.vfs, "wlock", wait, unlock,
                   self.invalidatedirstate, _('working directory of %s') %
                   self.origroot,
                   inheritchecker=self._wlockchecktransaction,
                   parentenvvar='HG_WLOCK_LOCKER')
    self._wlockref = weakref.ref(l)
    return l
1348
1349
1349 def _currentlock(self, lockref):
1350 def _currentlock(self, lockref):
1350 """Returns the lock if it's held, or None if it's not."""
1351 """Returns the lock if it's held, or None if it's not."""
1351 if lockref is None:
1352 if lockref is None:
1352 return None
1353 return None
1353 l = lockref()
1354 l = lockref()
1354 if l is None or not l.held:
1355 if l is None or not l.held:
1355 return None
1356 return None
1356 return l
1357 return l
1357
1358
def currentwlock(self):
    """Return the wlock when it is held, or None otherwise."""
    wref = self._wlockref
    return self._currentlock(wref)
1361
1362
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """
    commit an individual file as part of a larger transaction

    ``fctx``: file context to commit; ``manifest1``/``manifest2``:
    manifests of the two commit parents; ``linkrev``: changelog
    revision this filelog entry will be linked to; ``tr``: running
    transaction; ``changelist``: list of changed files, appended to in
    place.  Returns the filelog node for the file (possibly a reused
    parent node when the content is unchanged).
    """

    fname = fctx.path()
    fparent1 = manifest1.get(fname, nullid)
    fparent2 = manifest2.get(fname, nullid)
    if isinstance(fctx, context.filectx):
        # an existing filelog entry can be reused when it matches one
        # of the parents
        node = fctx.filenode()
        if node in [fparent1, fparent2]:
            self.ui.debug('reusing %s filelog entry\n' % fname)
            return node

    flog = self.file(fname)
    meta = {}
    copy = fctx.renamed()
    if copy and copy[0] != fname:
        # Mark the new revision of this file as a copy of another
        # file. This copy data will effectively act as a parent
        # of this new revision. If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent. For example:
        #
        # 0 --- 1 --- 3 rev1 changes file foo
        #  \   /      rev2 renames foo to bar and changes it
        #   \- 2 -/   rev3 should have bar with all changes and
        #             should record that bar descends from
        #             bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3 rev4 reverts the content change from rev2
        #  \   /        merging rev3 and rev4 should use bar@rev2
        #   \- 2 --- 4  as the merge base
        #

        cfname = copy[0]
        crev = manifest1.get(cfname)
        newfparent = fparent2

        if manifest2: # branch merge
            if fparent2 == nullid or crev is None: # copied on remote side
                if cfname in manifest2:
                    crev = manifest2[cfname]
                    newfparent = fparent1

        # Here, we used to search backwards through history to try to find
        # where the file copy came from if the source of a copy was not in
        # the parent directory. However, this doesn't actually make sense to
        # do (what does a copy from something not in your working copy even
        # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
        # the user that copy information was dropped, so if they didn't
        # expect this outcome it can be fixed, but this is the correct
        # behavior in this circumstance.

        if crev:
            self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        else:
            self.ui.warn(_("warning: can't find ancestor for '%s' "
                           "copied from '%s'!\n") % (fname, cfname))

    elif fparent1 == nullid:
        fparent1, fparent2 = fparent2, nullid
    elif fparent2 != nullid:
        # is one parent an ancestor of the other?
        fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
        if fparent1 in fparentancestors:
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 in fparentancestors:
            fparent2 = nullid

    # is the file changed?
    text = fctx.data()
    if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
        changelist.append(fname)
        return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
    # are just the flags changed during merge?
    elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
        changelist.append(fname)

    return fparent1
1447
1448
1448 @unfilteredmethod
1449 @unfilteredmethod
1449 def commit(self, text="", user=None, date=None, match=None, force=False,
1450 def commit(self, text="", user=None, date=None, match=None, force=False,
1450 editor=False, extra=None):
1451 editor=False, extra=None):
1451 """Add a new revision to current repository.
1452 """Add a new revision to current repository.
1452
1453
1453 Revision information is gathered from the working directory,
1454 Revision information is gathered from the working directory,
1454 match can be used to filter the committed files. If editor is
1455 match can be used to filter the committed files. If editor is
1455 supplied, it is called to get a commit message.
1456 supplied, it is called to get a commit message.
1456 """
1457 """
1457 if extra is None:
1458 if extra is None:
1458 extra = {}
1459 extra = {}
1459
1460
1460 def fail(f, msg):
1461 def fail(f, msg):
1461 raise error.Abort('%s: %s' % (f, msg))
1462 raise error.Abort('%s: %s' % (f, msg))
1462
1463
1463 if not match:
1464 if not match:
1464 match = matchmod.always(self.root, '')
1465 match = matchmod.always(self.root, '')
1465
1466
1466 if not force:
1467 if not force:
1467 vdirs = []
1468 vdirs = []
1468 match.explicitdir = vdirs.append
1469 match.explicitdir = vdirs.append
1469 match.bad = fail
1470 match.bad = fail
1470
1471
1471 wlock = lock = tr = None
1472 wlock = lock = tr = None
1472 try:
1473 try:
1473 wlock = self.wlock()
1474 wlock = self.wlock()
1474 lock = self.lock() # for recent changelog (see issue4368)
1475 lock = self.lock() # for recent changelog (see issue4368)
1475
1476
1476 wctx = self[None]
1477 wctx = self[None]
1477 merge = len(wctx.parents()) > 1
1478 merge = len(wctx.parents()) > 1
1478
1479
1479 if not force and merge and match.ispartial():
1480 if not force and merge and match.ispartial():
1480 raise error.Abort(_('cannot partially commit a merge '
1481 raise error.Abort(_('cannot partially commit a merge '
1481 '(do not specify files or patterns)'))
1482 '(do not specify files or patterns)'))
1482
1483
1483 status = self.status(match=match, clean=force)
1484 status = self.status(match=match, clean=force)
1484 if force:
1485 if force:
1485 status.modified.extend(status.clean) # mq may commit clean files
1486 status.modified.extend(status.clean) # mq may commit clean files
1486
1487
1487 # check subrepos
1488 # check subrepos
1488 subs = []
1489 subs = []
1489 commitsubs = set()
1490 commitsubs = set()
1490 newstate = wctx.substate.copy()
1491 newstate = wctx.substate.copy()
1491 # only manage subrepos and .hgsubstate if .hgsub is present
1492 # only manage subrepos and .hgsubstate if .hgsub is present
1492 if '.hgsub' in wctx:
1493 if '.hgsub' in wctx:
1493 # we'll decide whether to track this ourselves, thanks
1494 # we'll decide whether to track this ourselves, thanks
1494 for c in status.modified, status.added, status.removed:
1495 for c in status.modified, status.added, status.removed:
1495 if '.hgsubstate' in c:
1496 if '.hgsubstate' in c:
1496 c.remove('.hgsubstate')
1497 c.remove('.hgsubstate')
1497
1498
1498 # compare current state to last committed state
1499 # compare current state to last committed state
1499 # build new substate based on last committed state
1500 # build new substate based on last committed state
1500 oldstate = wctx.p1().substate
1501 oldstate = wctx.p1().substate
1501 for s in sorted(newstate.keys()):
1502 for s in sorted(newstate.keys()):
1502 if not match(s):
1503 if not match(s):
1503 # ignore working copy, use old state if present
1504 # ignore working copy, use old state if present
1504 if s in oldstate:
1505 if s in oldstate:
1505 newstate[s] = oldstate[s]
1506 newstate[s] = oldstate[s]
1506 continue
1507 continue
1507 if not force:
1508 if not force:
1508 raise error.Abort(
1509 raise error.Abort(
1509 _("commit with new subrepo %s excluded") % s)
1510 _("commit with new subrepo %s excluded") % s)
1510 dirtyreason = wctx.sub(s).dirtyreason(True)
1511 dirtyreason = wctx.sub(s).dirtyreason(True)
1511 if dirtyreason:
1512 if dirtyreason:
1512 if not self.ui.configbool('ui', 'commitsubrepos'):
1513 if not self.ui.configbool('ui', 'commitsubrepos'):
1513 raise error.Abort(dirtyreason,
1514 raise error.Abort(dirtyreason,
1514 hint=_("use --subrepos for recursive commit"))
1515 hint=_("use --subrepos for recursive commit"))
1515 subs.append(s)
1516 subs.append(s)
1516 commitsubs.add(s)
1517 commitsubs.add(s)
1517 else:
1518 else:
1518 bs = wctx.sub(s).basestate()
1519 bs = wctx.sub(s).basestate()
1519 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1520 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1520 if oldstate.get(s, (None, None, None))[1] != bs:
1521 if oldstate.get(s, (None, None, None))[1] != bs:
1521 subs.append(s)
1522 subs.append(s)
1522
1523
1523 # check for removed subrepos
1524 # check for removed subrepos
1524 for p in wctx.parents():
1525 for p in wctx.parents():
1525 r = [s for s in p.substate if s not in newstate]
1526 r = [s for s in p.substate if s not in newstate]
1526 subs += [s for s in r if match(s)]
1527 subs += [s for s in r if match(s)]
1527 if subs:
1528 if subs:
1528 if (not match('.hgsub') and
1529 if (not match('.hgsub') and
1529 '.hgsub' in (wctx.modified() + wctx.added())):
1530 '.hgsub' in (wctx.modified() + wctx.added())):
1530 raise error.Abort(
1531 raise error.Abort(
1531 _("can't commit subrepos without .hgsub"))
1532 _("can't commit subrepos without .hgsub"))
1532 status.modified.insert(0, '.hgsubstate')
1533 status.modified.insert(0, '.hgsubstate')
1533
1534
1534 elif '.hgsub' in status.removed:
1535 elif '.hgsub' in status.removed:
1535 # clean up .hgsubstate when .hgsub is removed
1536 # clean up .hgsubstate when .hgsub is removed
1536 if ('.hgsubstate' in wctx and
1537 if ('.hgsubstate' in wctx and
1537 '.hgsubstate' not in (status.modified + status.added +
1538 '.hgsubstate' not in (status.modified + status.added +
1538 status.removed)):
1539 status.removed)):
1539 status.removed.insert(0, '.hgsubstate')
1540 status.removed.insert(0, '.hgsubstate')
1540
1541
1541 # make sure all explicit patterns are matched
1542 # make sure all explicit patterns are matched
1542 if not force and (match.isexact() or match.prefix()):
1543 if not force and (match.isexact() or match.prefix()):
1543 matched = set(status.modified + status.added + status.removed)
1544 matched = set(status.modified + status.added + status.removed)
1544
1545
1545 for f in match.files():
1546 for f in match.files():
1546 f = self.dirstate.normalize(f)
1547 f = self.dirstate.normalize(f)
1547 if f == '.' or f in matched or f in wctx.substate:
1548 if f == '.' or f in matched or f in wctx.substate:
1548 continue
1549 continue
1549 if f in status.deleted:
1550 if f in status.deleted:
1550 fail(f, _('file not found!'))
1551 fail(f, _('file not found!'))
1551 if f in vdirs: # visited directory
1552 if f in vdirs: # visited directory
1552 d = f + '/'
1553 d = f + '/'
1553 for mf in matched:
1554 for mf in matched:
1554 if mf.startswith(d):
1555 if mf.startswith(d):
1555 break
1556 break
1556 else:
1557 else:
1557 fail(f, _("no match under directory!"))
1558 fail(f, _("no match under directory!"))
1558 elif f not in self.dirstate:
1559 elif f not in self.dirstate:
1559 fail(f, _("file not tracked!"))
1560 fail(f, _("file not tracked!"))
1560
1561
1561 cctx = context.workingcommitctx(self, status,
1562 cctx = context.workingcommitctx(self, status,
1562 text, user, date, extra)
1563 text, user, date, extra)
1563
1564
1564 # internal config: ui.allowemptycommit
1565 # internal config: ui.allowemptycommit
1565 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1566 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1566 or extra.get('close') or merge or cctx.files()
1567 or extra.get('close') or merge or cctx.files()
1567 or self.ui.configbool('ui', 'allowemptycommit'))
1568 or self.ui.configbool('ui', 'allowemptycommit'))
1568 if not allowemptycommit:
1569 if not allowemptycommit:
1569 return None
1570 return None
1570
1571
1571 if merge and cctx.deleted():
1572 if merge and cctx.deleted():
1572 raise error.Abort(_("cannot commit merge with missing files"))
1573 raise error.Abort(_("cannot commit merge with missing files"))
1573
1574
1574 ms = mergemod.mergestate.read(self)
1575 ms = mergemod.mergestate.read(self)
1575
1576
1576 if list(ms.unresolved()):
1577 if list(ms.unresolved()):
1577 raise error.Abort(_('unresolved merge conflicts '
1578 raise error.Abort(_('unresolved merge conflicts '
1578 '(see "hg help resolve")'))
1579 '(see "hg help resolve")'))
1579 if ms.mdstate() != 's' or list(ms.driverresolved()):
1580 if ms.mdstate() != 's' or list(ms.driverresolved()):
1580 raise error.Abort(_('driver-resolved merge conflicts'),
1581 raise error.Abort(_('driver-resolved merge conflicts'),
1581 hint=_('run "hg resolve --all" to resolve'))
1582 hint=_('run "hg resolve --all" to resolve'))
1582
1583
1583 if editor:
1584 if editor:
1584 cctx._text = editor(self, cctx, subs)
1585 cctx._text = editor(self, cctx, subs)
1585 edited = (text != cctx._text)
1586 edited = (text != cctx._text)
1586
1587
1587 # Save commit message in case this transaction gets rolled back
1588 # Save commit message in case this transaction gets rolled back
1588 # (e.g. by a pretxncommit hook). Leave the content alone on
1589 # (e.g. by a pretxncommit hook). Leave the content alone on
1589 # the assumption that the user will use the same editor again.
1590 # the assumption that the user will use the same editor again.
1590 msgfn = self.savecommitmessage(cctx._text)
1591 msgfn = self.savecommitmessage(cctx._text)
1591
1592
1592 # commit subs and write new state
1593 # commit subs and write new state
1593 if subs:
1594 if subs:
1594 for s in sorted(commitsubs):
1595 for s in sorted(commitsubs):
1595 sub = wctx.sub(s)
1596 sub = wctx.sub(s)
1596 self.ui.status(_('committing subrepository %s\n') %
1597 self.ui.status(_('committing subrepository %s\n') %
1597 subrepo.subrelpath(sub))
1598 subrepo.subrelpath(sub))
1598 sr = sub.commit(cctx._text, user, date)
1599 sr = sub.commit(cctx._text, user, date)
1599 newstate[s] = (newstate[s][0], sr)
1600 newstate[s] = (newstate[s][0], sr)
1600 subrepo.writestate(self, newstate)
1601 subrepo.writestate(self, newstate)
1601
1602
1602 p1, p2 = self.dirstate.parents()
1603 p1, p2 = self.dirstate.parents()
1603 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1604 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1604 try:
1605 try:
1605 self.hook("precommit", throw=True, parent1=hookp1,
1606 self.hook("precommit", throw=True, parent1=hookp1,
1606 parent2=hookp2)
1607 parent2=hookp2)
1607 tr = self.transaction('commit')
1608 tr = self.transaction('commit')
1608 ret = self.commitctx(cctx, True)
1609 ret = self.commitctx(cctx, True)
1609 except: # re-raises
1610 except: # re-raises
1610 if edited:
1611 if edited:
1611 self.ui.write(
1612 self.ui.write(
1612 _('note: commit message saved in %s\n') % msgfn)
1613 _('note: commit message saved in %s\n') % msgfn)
1613 raise
1614 raise
1614 # update bookmarks, dirstate and mergestate
1615 # update bookmarks, dirstate and mergestate
1615 bookmarks.update(self, [p1, p2], ret)
1616 bookmarks.update(self, [p1, p2], ret)
1616 cctx.markcommitted(ret)
1617 cctx.markcommitted(ret)
1617 ms.reset()
1618 ms.reset()
1618 tr.close()
1619 tr.close()
1619
1620
1620 finally:
1621 finally:
1621 lockmod.release(tr, lock, wlock)
1622 lockmod.release(tr, lock, wlock)
1622
1623
1623 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1624 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1624 # hack for command that use a temporary commit (eg: histedit)
1625 # hack for command that use a temporary commit (eg: histedit)
1625 # temporary commit got stripped before hook release
1626 # temporary commit got stripped before hook release
1626 if self.changelog.hasnode(ret):
1627 if self.changelog.hasnode(ret):
1627 self.hook("commit", node=node, parent1=parent1,
1628 self.hook("commit", node=node, parent1=parent1,
1628 parent2=parent2)
1629 parent2=parent2)
1629 self._afterlock(commithook)
1630 self._afterlock(commithook)
1630 return ret
1631 return ret
1631
1632
1632 @unfilteredmethod
1633 @unfilteredmethod
1633 def commitctx(self, ctx, error=False):
1634 def commitctx(self, ctx, error=False):
1634 """Add a new revision to current repository.
1635 """Add a new revision to current repository.
1635 Revision information is passed via the context argument.
1636 Revision information is passed via the context argument.
1636 """
1637 """
1637
1638
1638 tr = None
1639 tr = None
1639 p1, p2 = ctx.p1(), ctx.p2()
1640 p1, p2 = ctx.p1(), ctx.p2()
1640 user = ctx.user()
1641 user = ctx.user()
1641
1642
1642 lock = self.lock()
1643 lock = self.lock()
1643 try:
1644 try:
1644 tr = self.transaction("commit")
1645 tr = self.transaction("commit")
1645 trp = weakref.proxy(tr)
1646 trp = weakref.proxy(tr)
1646
1647
1647 if ctx.files():
1648 if ctx.files():
1648 m1 = p1.manifest()
1649 m1 = p1.manifest()
1649 m2 = p2.manifest()
1650 m2 = p2.manifest()
1650 m = m1.copy()
1651 m = m1.copy()
1651
1652
1652 # check in files
1653 # check in files
1653 added = []
1654 added = []
1654 changed = []
1655 changed = []
1655 removed = list(ctx.removed())
1656 removed = list(ctx.removed())
1656 linkrev = len(self)
1657 linkrev = len(self)
1657 self.ui.note(_("committing files:\n"))
1658 self.ui.note(_("committing files:\n"))
1658 for f in sorted(ctx.modified() + ctx.added()):
1659 for f in sorted(ctx.modified() + ctx.added()):
1659 self.ui.note(f + "\n")
1660 self.ui.note(f + "\n")
1660 try:
1661 try:
1661 fctx = ctx[f]
1662 fctx = ctx[f]
1662 if fctx is None:
1663 if fctx is None:
1663 removed.append(f)
1664 removed.append(f)
1664 else:
1665 else:
1665 added.append(f)
1666 added.append(f)
1666 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1667 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1667 trp, changed)
1668 trp, changed)
1668 m.setflag(f, fctx.flags())
1669 m.setflag(f, fctx.flags())
1669 except OSError as inst:
1670 except OSError as inst:
1670 self.ui.warn(_("trouble committing %s!\n") % f)
1671 self.ui.warn(_("trouble committing %s!\n") % f)
1671 raise
1672 raise
1672 except IOError as inst:
1673 except IOError as inst:
1673 errcode = getattr(inst, 'errno', errno.ENOENT)
1674 errcode = getattr(inst, 'errno', errno.ENOENT)
1674 if error or errcode and errcode != errno.ENOENT:
1675 if error or errcode and errcode != errno.ENOENT:
1675 self.ui.warn(_("trouble committing %s!\n") % f)
1676 self.ui.warn(_("trouble committing %s!\n") % f)
1676 raise
1677 raise
1677
1678
1678 # update manifest
1679 # update manifest
1679 self.ui.note(_("committing manifest\n"))
1680 self.ui.note(_("committing manifest\n"))
1680 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1681 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1681 drop = [f for f in removed if f in m]
1682 drop = [f for f in removed if f in m]
1682 for f in drop:
1683 for f in drop:
1683 del m[f]
1684 del m[f]
1684 mn = self.manifest.add(m, trp, linkrev,
1685 mn = self.manifest.add(m, trp, linkrev,
1685 p1.manifestnode(), p2.manifestnode(),
1686 p1.manifestnode(), p2.manifestnode(),
1686 added, drop)
1687 added, drop)
1687 files = changed + removed
1688 files = changed + removed
1688 else:
1689 else:
1689 mn = p1.manifestnode()
1690 mn = p1.manifestnode()
1690 files = []
1691 files = []
1691
1692
1692 # update changelog
1693 # update changelog
1693 self.ui.note(_("committing changelog\n"))
1694 self.ui.note(_("committing changelog\n"))
1694 self.changelog.delayupdate(tr)
1695 self.changelog.delayupdate(tr)
1695 n = self.changelog.add(mn, files, ctx.description(),
1696 n = self.changelog.add(mn, files, ctx.description(),
1696 trp, p1.node(), p2.node(),
1697 trp, p1.node(), p2.node(),
1697 user, ctx.date(), ctx.extra().copy())
1698 user, ctx.date(), ctx.extra().copy())
1698 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1699 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1699 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1700 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1700 parent2=xp2)
1701 parent2=xp2)
1701 # set the new commit is proper phase
1702 # set the new commit is proper phase
1702 targetphase = subrepo.newcommitphase(self.ui, ctx)
1703 targetphase = subrepo.newcommitphase(self.ui, ctx)
1703 if targetphase:
1704 if targetphase:
1704 # retract boundary do not alter parent changeset.
1705 # retract boundary do not alter parent changeset.
1705 # if a parent have higher the resulting phase will
1706 # if a parent have higher the resulting phase will
1706 # be compliant anyway
1707 # be compliant anyway
1707 #
1708 #
1708 # if minimal phase was 0 we don't need to retract anything
1709 # if minimal phase was 0 we don't need to retract anything
1709 phases.retractboundary(self, tr, targetphase, [n])
1710 phases.retractboundary(self, tr, targetphase, [n])
1710 tr.close()
1711 tr.close()
1711 branchmap.updatecache(self.filtered('served'))
1712 branchmap.updatecache(self.filtered('served'))
1712 return n
1713 return n
1713 finally:
1714 finally:
1714 if tr:
1715 if tr:
1715 tr.release()
1716 tr.release()
1716 lock.release()
1717 lock.release()
1717
1718
1718 @unfilteredmethod
1719 @unfilteredmethod
1719 def destroying(self):
1720 def destroying(self):
1720 '''Inform the repository that nodes are about to be destroyed.
1721 '''Inform the repository that nodes are about to be destroyed.
1721 Intended for use by strip and rollback, so there's a common
1722 Intended for use by strip and rollback, so there's a common
1722 place for anything that has to be done before destroying history.
1723 place for anything that has to be done before destroying history.
1723
1724
1724 This is mostly useful for saving state that is in memory and waiting
1725 This is mostly useful for saving state that is in memory and waiting
1725 to be flushed when the current lock is released. Because a call to
1726 to be flushed when the current lock is released. Because a call to
1726 destroyed is imminent, the repo will be invalidated causing those
1727 destroyed is imminent, the repo will be invalidated causing those
1727 changes to stay in memory (waiting for the next unlock), or vanish
1728 changes to stay in memory (waiting for the next unlock), or vanish
1728 completely.
1729 completely.
1729 '''
1730 '''
1730 # When using the same lock to commit and strip, the phasecache is left
1731 # When using the same lock to commit and strip, the phasecache is left
1731 # dirty after committing. Then when we strip, the repo is invalidated,
1732 # dirty after committing. Then when we strip, the repo is invalidated,
1732 # causing those changes to disappear.
1733 # causing those changes to disappear.
1733 if '_phasecache' in vars(self):
1734 if '_phasecache' in vars(self):
1734 self._phasecache.write()
1735 self._phasecache.write()
1735
1736
1736 @unfilteredmethod
1737 @unfilteredmethod
1737 def destroyed(self):
1738 def destroyed(self):
1738 '''Inform the repository that nodes have been destroyed.
1739 '''Inform the repository that nodes have been destroyed.
1739 Intended for use by strip and rollback, so there's a common
1740 Intended for use by strip and rollback, so there's a common
1740 place for anything that has to be done after destroying history.
1741 place for anything that has to be done after destroying history.
1741 '''
1742 '''
1742 # When one tries to:
1743 # When one tries to:
1743 # 1) destroy nodes thus calling this method (e.g. strip)
1744 # 1) destroy nodes thus calling this method (e.g. strip)
1744 # 2) use phasecache somewhere (e.g. commit)
1745 # 2) use phasecache somewhere (e.g. commit)
1745 #
1746 #
1746 # then 2) will fail because the phasecache contains nodes that were
1747 # then 2) will fail because the phasecache contains nodes that were
1747 # removed. We can either remove phasecache from the filecache,
1748 # removed. We can either remove phasecache from the filecache,
1748 # causing it to reload next time it is accessed, or simply filter
1749 # causing it to reload next time it is accessed, or simply filter
1749 # the removed nodes now and write the updated cache.
1750 # the removed nodes now and write the updated cache.
1750 self._phasecache.filterunknown(self)
1751 self._phasecache.filterunknown(self)
1751 self._phasecache.write()
1752 self._phasecache.write()
1752
1753
1753 # update the 'served' branch cache to help read only server process
1754 # update the 'served' branch cache to help read only server process
1754 # Thanks to branchcache collaboration this is done from the nearest
1755 # Thanks to branchcache collaboration this is done from the nearest
1755 # filtered subset and it is expected to be fast.
1756 # filtered subset and it is expected to be fast.
1756 branchmap.updatecache(self.filtered('served'))
1757 branchmap.updatecache(self.filtered('served'))
1757
1758
1758 # Ensure the persistent tag cache is updated. Doing it now
1759 # Ensure the persistent tag cache is updated. Doing it now
1759 # means that the tag cache only has to worry about destroyed
1760 # means that the tag cache only has to worry about destroyed
1760 # heads immediately after a strip/rollback. That in turn
1761 # heads immediately after a strip/rollback. That in turn
1761 # guarantees that "cachetip == currenttip" (comparing both rev
1762 # guarantees that "cachetip == currenttip" (comparing both rev
1762 # and node) always means no nodes have been added or destroyed.
1763 # and node) always means no nodes have been added or destroyed.
1763
1764
1764 # XXX this is suboptimal when qrefresh'ing: we strip the current
1765 # XXX this is suboptimal when qrefresh'ing: we strip the current
1765 # head, refresh the tag cache, then immediately add a new head.
1766 # head, refresh the tag cache, then immediately add a new head.
1766 # But I think doing it this way is necessary for the "instant
1767 # But I think doing it this way is necessary for the "instant
1767 # tag cache retrieval" case to work.
1768 # tag cache retrieval" case to work.
1768 self.invalidate()
1769 self.invalidate()
1769
1770
1770 def walk(self, match, node=None):
1771 def walk(self, match, node=None):
1771 '''
1772 '''
1772 walk recursively through the directory tree or a given
1773 walk recursively through the directory tree or a given
1773 changeset, finding all files matched by the match
1774 changeset, finding all files matched by the match
1774 function
1775 function
1775 '''
1776 '''
1776 return self[node].walk(match)
1777 return self[node].walk(match)
1777
1778
1778 def status(self, node1='.', node2=None, match=None,
1779 def status(self, node1='.', node2=None, match=None,
1779 ignored=False, clean=False, unknown=False,
1780 ignored=False, clean=False, unknown=False,
1780 listsubrepos=False):
1781 listsubrepos=False):
1781 '''a convenience method that calls node1.status(node2)'''
1782 '''a convenience method that calls node1.status(node2)'''
1782 return self[node1].status(node2, match, ignored, clean, unknown,
1783 return self[node1].status(node2, match, ignored, clean, unknown,
1783 listsubrepos)
1784 listsubrepos)
1784
1785
1785 def heads(self, start=None):
1786 def heads(self, start=None):
1786 heads = self.changelog.heads(start)
1787 heads = self.changelog.heads(start)
1787 # sort the output in rev descending order
1788 # sort the output in rev descending order
1788 return sorted(heads, key=self.changelog.rev, reverse=True)
1789 return sorted(heads, key=self.changelog.rev, reverse=True)
1789
1790
1790 def branchheads(self, branch=None, start=None, closed=False):
1791 def branchheads(self, branch=None, start=None, closed=False):
1791 '''return a (possibly filtered) list of heads for the given branch
1792 '''return a (possibly filtered) list of heads for the given branch
1792
1793
1793 Heads are returned in topological order, from newest to oldest.
1794 Heads are returned in topological order, from newest to oldest.
1794 If branch is None, use the dirstate branch.
1795 If branch is None, use the dirstate branch.
1795 If start is not None, return only heads reachable from start.
1796 If start is not None, return only heads reachable from start.
1796 If closed is True, return heads that are marked as closed as well.
1797 If closed is True, return heads that are marked as closed as well.
1797 '''
1798 '''
1798 if branch is None:
1799 if branch is None:
1799 branch = self[None].branch()
1800 branch = self[None].branch()
1800 branches = self.branchmap()
1801 branches = self.branchmap()
1801 if branch not in branches:
1802 if branch not in branches:
1802 return []
1803 return []
1803 # the cache returns heads ordered lowest to highest
1804 # the cache returns heads ordered lowest to highest
1804 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1805 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1805 if start is not None:
1806 if start is not None:
1806 # filter out the heads that cannot be reached from startrev
1807 # filter out the heads that cannot be reached from startrev
1807 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1808 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1808 bheads = [h for h in bheads if h in fbheads]
1809 bheads = [h for h in bheads if h in fbheads]
1809 return bheads
1810 return bheads
1810
1811
1811 def branches(self, nodes):
1812 def branches(self, nodes):
1812 if not nodes:
1813 if not nodes:
1813 nodes = [self.changelog.tip()]
1814 nodes = [self.changelog.tip()]
1814 b = []
1815 b = []
1815 for n in nodes:
1816 for n in nodes:
1816 t = n
1817 t = n
1817 while True:
1818 while True:
1818 p = self.changelog.parents(n)
1819 p = self.changelog.parents(n)
1819 if p[1] != nullid or p[0] == nullid:
1820 if p[1] != nullid or p[0] == nullid:
1820 b.append((t, n, p[0], p[1]))
1821 b.append((t, n, p[0], p[1]))
1821 break
1822 break
1822 n = p[0]
1823 n = p[0]
1823 return b
1824 return b
1824
1825
1825 def between(self, pairs):
1826 def between(self, pairs):
1826 r = []
1827 r = []
1827
1828
1828 for top, bottom in pairs:
1829 for top, bottom in pairs:
1829 n, l, i = top, [], 0
1830 n, l, i = top, [], 0
1830 f = 1
1831 f = 1
1831
1832
1832 while n != bottom and n != nullid:
1833 while n != bottom and n != nullid:
1833 p = self.changelog.parents(n)[0]
1834 p = self.changelog.parents(n)[0]
1834 if i == f:
1835 if i == f:
1835 l.append(n)
1836 l.append(n)
1836 f = f * 2
1837 f = f * 2
1837 n = p
1838 n = p
1838 i += 1
1839 i += 1
1839
1840
1840 r.append(l)
1841 r.append(l)
1841
1842
1842 return r
1843 return r
1843
1844
1844 def checkpush(self, pushop):
1845 def checkpush(self, pushop):
1845 """Extensions can override this function if additional checks have
1846 """Extensions can override this function if additional checks have
1846 to be performed before pushing, or call it if they override push
1847 to be performed before pushing, or call it if they override push
1847 command.
1848 command.
1848 """
1849 """
1849 pass
1850 pass
1850
1851
1851 @unfilteredpropertycache
1852 @unfilteredpropertycache
1852 def prepushoutgoinghooks(self):
1853 def prepushoutgoinghooks(self):
1853 """Return util.hooks consists of "(repo, remote, outgoing)"
1854 """Return util.hooks consists of "(repo, remote, outgoing)"
1854 functions, which are called before pushing changesets.
1855 functions, which are called before pushing changesets.
1855 """
1856 """
1856 return util.hooks()
1857 return util.hooks()
1857
1858
1858 def pushkey(self, namespace, key, old, new):
1859 def pushkey(self, namespace, key, old, new):
1859 try:
1860 try:
1860 tr = self.currenttransaction()
1861 tr = self.currenttransaction()
1861 hookargs = {}
1862 hookargs = {}
1862 if tr is not None:
1863 if tr is not None:
1863 hookargs.update(tr.hookargs)
1864 hookargs.update(tr.hookargs)
1864 hookargs['namespace'] = namespace
1865 hookargs['namespace'] = namespace
1865 hookargs['key'] = key
1866 hookargs['key'] = key
1866 hookargs['old'] = old
1867 hookargs['old'] = old
1867 hookargs['new'] = new
1868 hookargs['new'] = new
1868 self.hook('prepushkey', throw=True, **hookargs)
1869 self.hook('prepushkey', throw=True, **hookargs)
1869 except error.HookAbort as exc:
1870 except error.HookAbort as exc:
1870 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1871 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1871 if exc.hint:
1872 if exc.hint:
1872 self.ui.write_err(_("(%s)\n") % exc.hint)
1873 self.ui.write_err(_("(%s)\n") % exc.hint)
1873 return False
1874 return False
1874 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1875 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1875 ret = pushkey.push(self, namespace, key, old, new)
1876 ret = pushkey.push(self, namespace, key, old, new)
1876 def runhook():
1877 def runhook():
1877 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1878 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1878 ret=ret)
1879 ret=ret)
1879 self._afterlock(runhook)
1880 self._afterlock(runhook)
1880 return ret
1881 return ret
1881
1882
1882 def listkeys(self, namespace):
1883 def listkeys(self, namespace):
1883 self.hook('prelistkeys', throw=True, namespace=namespace)
1884 self.hook('prelistkeys', throw=True, namespace=namespace)
1884 self.ui.debug('listing keys for "%s"\n' % namespace)
1885 self.ui.debug('listing keys for "%s"\n' % namespace)
1885 values = pushkey.list(self, namespace)
1886 values = pushkey.list(self, namespace)
1886 self.hook('listkeys', namespace=namespace, values=values)
1887 self.hook('listkeys', namespace=namespace, values=values)
1887 return values
1888 return values
1888
1889
1889 def debugwireargs(self, one, two, three=None, four=None, five=None):
1890 def debugwireargs(self, one, two, three=None, four=None, five=None):
1890 '''used to test argument passing over the wire'''
1891 '''used to test argument passing over the wire'''
1891 return "%s %s %s %s %s" % (one, two, three, four, five)
1892 return "%s %s %s %s %s" % (one, two, three, four, five)
1892
1893
1893 def savecommitmessage(self, text):
1894 def savecommitmessage(self, text):
1894 fp = self.vfs('last-message.txt', 'wb')
1895 fp = self.vfs('last-message.txt', 'wb')
1895 try:
1896 try:
1896 fp.write(text)
1897 fp.write(text)
1897 finally:
1898 finally:
1898 fp.close()
1899 fp.close()
1899 return self.pathto(fp.name[len(self.root) + 1:])
1900 return self.pathto(fp.name[len(self.root) + 1:])
1900
1901
1901 # used to avoid circular references so destructors work
1902 # used to avoid circular references so destructors work
1902 def aftertrans(files):
1903 def aftertrans(files):
1903 renamefiles = [tuple(t) for t in files]
1904 renamefiles = [tuple(t) for t in files]
1904 def a():
1905 def a():
1905 for vfs, src, dest in renamefiles:
1906 for vfs, src, dest in renamefiles:
1906 try:
1907 try:
1907 vfs.rename(src, dest)
1908 vfs.rename(src, dest)
1908 except OSError: # journal file does not yet exist
1909 except OSError: # journal file does not yet exist
1909 pass
1910 pass
1910 return a
1911 return a
1911
1912
1912 def undoname(fn):
1913 def undoname(fn):
1913 base, name = os.path.split(fn)
1914 base, name = os.path.split(fn)
1914 assert name.startswith('journal')
1915 assert name.startswith('journal')
1915 return os.path.join(base, name.replace('journal', 'undo', 1))
1916 return os.path.join(base, name.replace('journal', 'undo', 1))
1916
1917
1917 def instance(ui, path, create):
1918 def instance(ui, path, create):
1918 return localrepository(ui, util.urllocalpath(path), create)
1919 return localrepository(ui, util.urllocalpath(path), create)
1919
1920
1920 def islocal(path):
1921 def islocal(path):
1921 return True
1922 return True
@@ -1,158 +1,154 b''
1 Test changesets filtering during exchanges (some tests are still in
1 Test changesets filtering during exchanges (some tests are still in
2 test-obsolete.t)
2 test-obsolete.t)
3
3
4 $ cat >> $HGRCPATH << EOF
4 $ cat >> $HGRCPATH << EOF
5 > [experimental]
5 > [experimental]
6 > evolution=createmarkers
6 > evolution=createmarkers
7 > EOF
7 > EOF
8
8
9 Push does not corrupt remote
9 Push does not corrupt remote
10 ----------------------------
10 ----------------------------
11
11
12 Create a DAG where a changeset reuses a revision from a file first used in an
12 Create a DAG where a changeset reuses a revision from a file first used in an
13 extinct changeset.
13 extinct changeset.
14
14
15 $ hg init local
15 $ hg init local
16 $ cd local
16 $ cd local
17 $ echo 'base' > base
17 $ echo 'base' > base
18 $ hg commit -Am base
18 $ hg commit -Am base
19 adding base
19 adding base
20 $ echo 'A' > A
20 $ echo 'A' > A
21 $ hg commit -Am A
21 $ hg commit -Am A
22 adding A
22 adding A
23 $ hg up 0
23 $ hg up 0
24 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
24 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
25 $ hg revert -ar 1
25 $ hg revert -ar 1
26 adding A
26 adding A
27 $ hg commit -Am "A'"
27 $ hg commit -Am "A'"
28 created new head
28 created new head
29 $ hg log -G --template='{desc} {node}'
29 $ hg log -G --template='{desc} {node}'
30 @ A' f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
30 @ A' f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
31 |
31 |
32 | o A 9d73aac1b2ed7d53835eaeec212ed41ea47da53a
32 | o A 9d73aac1b2ed7d53835eaeec212ed41ea47da53a
33 |/
33 |/
34 o base d20a80d4def38df63a4b330b7fb688f3d4cae1e3
34 o base d20a80d4def38df63a4b330b7fb688f3d4cae1e3
35
35
36 $ hg debugobsolete 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
36 $ hg debugobsolete 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
37
37
38 Push it. The bundle should not refer to the extinct changeset.
38 Push it. The bundle should not refer to the extinct changeset.
39
39
40 $ hg init ../other
40 $ hg init ../other
41 $ hg push ../other
41 $ hg push ../other
42 pushing to ../other
42 pushing to ../other
43 searching for changes
43 searching for changes
44 adding changesets
44 adding changesets
45 adding manifests
45 adding manifests
46 adding file changes
46 adding file changes
47 added 2 changesets with 2 changes to 2 files
47 added 2 changesets with 2 changes to 2 files
48 $ hg -R ../other verify
48 $ hg -R ../other verify
49 checking changesets
49 checking changesets
50 checking manifests
50 checking manifests
51 crosschecking files in changesets and manifests
51 crosschecking files in changesets and manifests
52 checking files
52 checking files
53 2 files, 2 changesets, 2 total revisions
53 2 files, 2 changesets, 2 total revisions
54
54
55 Adding a changeset going extinct locally
55 Adding a changeset going extinct locally
56 ------------------------------------------
56 ------------------------------------------
57
57
58 Pull a changeset that will immediatly goes extinct (because you already have a
58 Pull a changeset that will immediatly goes extinct (because you already have a
59 marker to obsolete him)
59 marker to obsolete him)
60 (test resolution of issue3788)
60 (test resolution of issue3788)
61
61
62 $ hg phase --draft --force f89bcc95eba5
62 $ hg phase --draft --force f89bcc95eba5
63 $ hg phase -R ../other --draft --force f89bcc95eba5
63 $ hg phase -R ../other --draft --force f89bcc95eba5
64 $ hg commit --amend -m "A''"
64 $ hg commit --amend -m "A''"
65 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
65 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
66 $ hg pull ../other
66 $ hg pull ../other
67 pulling from ../other
67 pulling from ../other
68 searching for changes
68 searching for changes
69 adding changesets
69 adding changesets
70 adding manifests
70 adding manifests
71 adding file changes
71 adding file changes
72 added 1 changesets with 0 changes to 1 files (+1 heads)
72 added 1 changesets with 0 changes to 1 files (+1 heads)
73 (run 'hg heads' to see heads, 'hg merge' to merge)
73 (run 'hg heads' to see heads, 'hg merge' to merge)
74
74
75 check that bundle is not affected
75 check that bundle is not affected
76
76
77 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5.hg
77 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5.hg
78 1 changesets found
78 1 changesets found
79 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
79 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
80 $ hg unbundle ../f89bcc95eba5.hg
80 $ hg unbundle ../f89bcc95eba5.hg
81 adding changesets
81 adding changesets
82 adding manifests
82 adding manifests
83 adding file changes
83 adding file changes
84 added 1 changesets with 0 changes to 1 files (+1 heads)
84 added 1 changesets with 0 changes to 1 files (+1 heads)
85 (run 'hg heads' to see heads)
85 (run 'hg heads' to see heads)
86 $ cd ..
86 $ cd ..
87
87
88 pull does not fetch excessive changesets when common node is hidden (issue4982)
88 pull does not fetch excessive changesets when common node is hidden (issue4982)
89 -------------------------------------------------------------------------------
89 -------------------------------------------------------------------------------
90
90
91 initial repo with server and client matching
91 initial repo with server and client matching
92
92
93 $ hg init pull-hidden-common
93 $ hg init pull-hidden-common
94 $ cd pull-hidden-common
94 $ cd pull-hidden-common
95 $ touch foo
95 $ touch foo
96 $ hg -q commit -A -m initial
96 $ hg -q commit -A -m initial
97 $ echo 1 > foo
97 $ echo 1 > foo
98 $ hg commit -m 1
98 $ hg commit -m 1
99 $ echo 2a > foo
99 $ echo 2a > foo
100 $ hg commit -m 2a
100 $ hg commit -m 2a
101 $ cd ..
101 $ cd ..
102 $ hg clone --pull pull-hidden-common pull-hidden-common-client
102 $ hg clone --pull pull-hidden-common pull-hidden-common-client
103 requesting all changes
103 requesting all changes
104 adding changesets
104 adding changesets
105 adding manifests
105 adding manifests
106 adding file changes
106 adding file changes
107 added 3 changesets with 3 changes to 1 files
107 added 3 changesets with 3 changes to 1 files
108 updating to branch default
108 updating to branch default
109 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
109 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
110
110
111 server obsoletes the old head
111 server obsoletes the old head
112
112
113 $ cd pull-hidden-common
113 $ cd pull-hidden-common
114 $ hg -q up -r 1
114 $ hg -q up -r 1
115 $ echo 2b > foo
115 $ echo 2b > foo
116 $ hg -q commit -m 2b
116 $ hg -q commit -m 2b
117 $ hg debugobsolete 6a29ed9c68defff1a139e5c6fa9696fb1a75783d bec0734cd68e84477ba7fc1d13e6cff53ab70129
117 $ hg debugobsolete 6a29ed9c68defff1a139e5c6fa9696fb1a75783d bec0734cd68e84477ba7fc1d13e6cff53ab70129
118 $ cd ..
118 $ cd ..
119
119
120 client only pulls down 1 changeset
120 client only pulls down 1 changeset
121 ("all local heads known remotely" may change if the wire protocol discovery
122 commands ever stop saying they have hidden changesets)
123
121
124 $ cd pull-hidden-common-client
122 $ cd pull-hidden-common-client
125 $ hg pull --debug
123 $ hg pull --debug
126 pulling from $TESTTMP/pull-hidden-common (glob)
124 pulling from $TESTTMP/pull-hidden-common (glob)
127 query 1; heads
125 query 1; heads
128 searching for changes
126 searching for changes
129 all local heads known remotely
127 taking quick initial sample
130 3 changesets found
128 query 2; still undecided: 2, sample size is: 2
129 2 total queries
130 1 changesets found
131 list of changesets:
131 list of changesets:
132 96ee1d7354c4ad7372047672c36a1f561e3a6a4c
133 a33779fdfc23063680fc31e9ff637dff6876d3d2
134 bec0734cd68e84477ba7fc1d13e6cff53ab70129
132 bec0734cd68e84477ba7fc1d13e6cff53ab70129
135 listing keys for "phase"
133 listing keys for "phase"
136 listing keys for "bookmarks"
134 listing keys for "bookmarks"
137 bundle2-output-bundle: "HG20", 3 parts total
135 bundle2-output-bundle: "HG20", 3 parts total
138 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
136 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
139 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
137 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
140 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
138 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
141 bundle2-input-bundle: with-transaction
139 bundle2-input-bundle: with-transaction
142 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
140 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
143 adding changesets
141 adding changesets
144 add changeset 96ee1d7354c4
145 add changeset a33779fdfc23
146 add changeset bec0734cd68e
142 add changeset bec0734cd68e
147 adding manifests
143 adding manifests
148 adding file changes
144 adding file changes
149 adding foo revisions
145 adding foo revisions
150 added 1 changesets with 1 changes to 1 files (+1 heads)
146 added 1 changesets with 1 changes to 1 files (+1 heads)
151 bundle2-input-part: total payload size 1378
147 bundle2-input-part: total payload size 474
152 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
148 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
153 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
149 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
154 bundle2-input-bundle: 2 parts total
150 bundle2-input-bundle: 2 parts total
155 checking for updated bookmarks
151 checking for updated bookmarks
156 listing keys for "phases"
152 listing keys for "phases"
157 updating the branch cache
153 updating the branch cache
158 (run 'hg heads' to see heads, 'hg merge' to merge)
154 (run 'hg heads' to see heads, 'hg merge' to merge)
@@ -1,977 +1,975 b''
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [phases]
2 > [phases]
3 > # public changeset are not obsolete
3 > # public changeset are not obsolete
4 > publish=false
4 > publish=false
5 > [ui]
5 > [ui]
6 > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n"
6 > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n"
7 > [experimental]
7 > [experimental]
8 > # drop me once bundle2 is the default,
8 > # drop me once bundle2 is the default,
9 > # added to get test change early.
9 > # added to get test change early.
10 > bundle2-exp = True
10 > bundle2-exp = True
11 > EOF
11 > EOF
12 $ mkcommit() {
12 $ mkcommit() {
13 > echo "$1" > "$1"
13 > echo "$1" > "$1"
14 > hg add "$1"
14 > hg add "$1"
15 > hg ci -m "add $1"
15 > hg ci -m "add $1"
16 > }
16 > }
17 $ getid() {
17 $ getid() {
18 > hg log -T "{node}\n" --hidden -r "desc('$1')"
18 > hg log -T "{node}\n" --hidden -r "desc('$1')"
19 > }
19 > }
20
20
21 $ cat > debugkeys.py <<EOF
21 $ cat > debugkeys.py <<EOF
22 > def reposetup(ui, repo):
22 > def reposetup(ui, repo):
23 > class debugkeysrepo(repo.__class__):
23 > class debugkeysrepo(repo.__class__):
24 > def listkeys(self, namespace):
24 > def listkeys(self, namespace):
25 > ui.write('listkeys %s\n' % (namespace,))
25 > ui.write('listkeys %s\n' % (namespace,))
26 > return super(debugkeysrepo, self).listkeys(namespace)
26 > return super(debugkeysrepo, self).listkeys(namespace)
27 >
27 >
28 > if repo.local():
28 > if repo.local():
29 > repo.__class__ = debugkeysrepo
29 > repo.__class__ = debugkeysrepo
30 > EOF
30 > EOF
31
31
32 $ hg init tmpa
32 $ hg init tmpa
33 $ cd tmpa
33 $ cd tmpa
34 $ mkcommit kill_me
34 $ mkcommit kill_me
35
35
36 Checking that the feature is properly disabled
36 Checking that the feature is properly disabled
37
37
38 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
38 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
39 abort: creating obsolete markers is not enabled on this repo
39 abort: creating obsolete markers is not enabled on this repo
40 [255]
40 [255]
41
41
42 Enabling it
42 Enabling it
43
43
44 $ cat >> $HGRCPATH << EOF
44 $ cat >> $HGRCPATH << EOF
45 > [experimental]
45 > [experimental]
46 > evolution=createmarkers,exchange
46 > evolution=createmarkers,exchange
47 > EOF
47 > EOF
48
48
49 Killing a single changeset without replacement
49 Killing a single changeset without replacement
50
50
51 $ hg debugobsolete 0
51 $ hg debugobsolete 0
52 abort: changeset references must be full hexadecimal node identifiers
52 abort: changeset references must be full hexadecimal node identifiers
53 [255]
53 [255]
54 $ hg debugobsolete '00'
54 $ hg debugobsolete '00'
55 abort: changeset references must be full hexadecimal node identifiers
55 abort: changeset references must be full hexadecimal node identifiers
56 [255]
56 [255]
57 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
57 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
58 $ hg debugobsolete
58 $ hg debugobsolete
59 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
59 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
60
60
61 (test that mercurial is not confused)
61 (test that mercurial is not confused)
62
62
63 $ hg up null --quiet # having 0 as parent prevents it to be hidden
63 $ hg up null --quiet # having 0 as parent prevents it to be hidden
64 $ hg tip
64 $ hg tip
65 -1:000000000000 (public) [tip ]
65 -1:000000000000 (public) [tip ]
66 $ hg up --hidden tip --quiet
66 $ hg up --hidden tip --quiet
67
67
68 Killing a single changeset with itself should fail
68 Killing a single changeset with itself should fail
69 (simple local safeguard)
69 (simple local safeguard)
70
70
71 $ hg debugobsolete `getid kill_me` `getid kill_me`
71 $ hg debugobsolete `getid kill_me` `getid kill_me`
72 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
72 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
73 [255]
73 [255]
74
74
75 $ cd ..
75 $ cd ..
76
76
77 Killing a single changeset with replacement
77 Killing a single changeset with replacement
78 (and testing the format option)
78 (and testing the format option)
79
79
80 $ hg init tmpb
80 $ hg init tmpb
81 $ cd tmpb
81 $ cd tmpb
82 $ mkcommit a
82 $ mkcommit a
83 $ mkcommit b
83 $ mkcommit b
84 $ mkcommit original_c
84 $ mkcommit original_c
85 $ hg up "desc('b')"
85 $ hg up "desc('b')"
86 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
86 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
87 $ mkcommit new_c
87 $ mkcommit new_c
88 created new head
88 created new head
89 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
89 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
90 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
90 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
91 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
91 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
92 2:245bde4270cd add original_c
92 2:245bde4270cd add original_c
93 $ hg debugrevlog -cd
93 $ hg debugrevlog -cd
94 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
94 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
95 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
95 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
96 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
96 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
97 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
97 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
98 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
98 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
99 $ hg debugobsolete
99 $ hg debugobsolete
100 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
100 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
101
101
102 (check for version number of the obsstore)
102 (check for version number of the obsstore)
103
103
104 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
104 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
105 \x00 (no-eol) (esc)
105 \x00 (no-eol) (esc)
106
106
107 do it again (it read the obsstore before adding new changeset)
107 do it again (it read the obsstore before adding new changeset)
108
108
109 $ hg up '.^'
109 $ hg up '.^'
110 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
110 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
111 $ mkcommit new_2_c
111 $ mkcommit new_2_c
112 created new head
112 created new head
113 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
113 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
114 $ hg debugobsolete
114 $ hg debugobsolete
115 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
115 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
116 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
116 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
117
117
118 Register two markers with a missing node
118 Register two markers with a missing node
119
119
120 $ hg up '.^'
120 $ hg up '.^'
121 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
121 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
122 $ mkcommit new_3_c
122 $ mkcommit new_3_c
123 created new head
123 created new head
124 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
124 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
125 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
125 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
126 $ hg debugobsolete
126 $ hg debugobsolete
127 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
127 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
128 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
128 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
129 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
129 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
130 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
130 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
131
131
132 Refuse pathological nullid successors
132 Refuse pathological nullid successors
133 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
133 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
134 transaction abort!
134 transaction abort!
135 rollback completed
135 rollback completed
136 abort: bad obsolescence marker detected: invalid successors nullid
136 abort: bad obsolescence marker detected: invalid successors nullid
137 [255]
137 [255]
138
138
139 Check that graphlog detect that a changeset is obsolete:
139 Check that graphlog detect that a changeset is obsolete:
140
140
141 $ hg log -G
141 $ hg log -G
142 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
142 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
143 |
143 |
144 o 1:7c3bad9141dc (draft) [ ] add b
144 o 1:7c3bad9141dc (draft) [ ] add b
145 |
145 |
146 o 0:1f0dee641bb7 (draft) [ ] add a
146 o 0:1f0dee641bb7 (draft) [ ] add a
147
147
148
148
149 check that heads does not report them
149 check that heads does not report them
150
150
151 $ hg heads
151 $ hg heads
152 5:5601fb93a350 (draft) [tip ] add new_3_c
152 5:5601fb93a350 (draft) [tip ] add new_3_c
153 $ hg heads --hidden
153 $ hg heads --hidden
154 5:5601fb93a350 (draft) [tip ] add new_3_c
154 5:5601fb93a350 (draft) [tip ] add new_3_c
155 4:ca819180edb9 (draft) [ ] add new_2_c
155 4:ca819180edb9 (draft) [ ] add new_2_c
156 3:cdbce2fbb163 (draft) [ ] add new_c
156 3:cdbce2fbb163 (draft) [ ] add new_c
157 2:245bde4270cd (draft) [ ] add original_c
157 2:245bde4270cd (draft) [ ] add original_c
158
158
159
159
160 check that summary does not report them
160 check that summary does not report them
161
161
162 $ hg init ../sink
162 $ hg init ../sink
163 $ echo '[paths]' >> .hg/hgrc
163 $ echo '[paths]' >> .hg/hgrc
164 $ echo 'default=../sink' >> .hg/hgrc
164 $ echo 'default=../sink' >> .hg/hgrc
165 $ hg summary --remote
165 $ hg summary --remote
166 parent: 5:5601fb93a350 tip
166 parent: 5:5601fb93a350 tip
167 add new_3_c
167 add new_3_c
168 branch: default
168 branch: default
169 commit: (clean)
169 commit: (clean)
170 update: (current)
170 update: (current)
171 phases: 3 draft
171 phases: 3 draft
172 remote: 3 outgoing
172 remote: 3 outgoing
173
173
174 $ hg summary --remote --hidden
174 $ hg summary --remote --hidden
175 parent: 5:5601fb93a350 tip
175 parent: 5:5601fb93a350 tip
176 add new_3_c
176 add new_3_c
177 branch: default
177 branch: default
178 commit: (clean)
178 commit: (clean)
179 update: 3 new changesets, 4 branch heads (merge)
179 update: 3 new changesets, 4 branch heads (merge)
180 phases: 6 draft
180 phases: 6 draft
181 remote: 3 outgoing
181 remote: 3 outgoing
182
182
183 check that various commands work well with filtering
183 check that various commands work well with filtering
184
184
185 $ hg tip
185 $ hg tip
186 5:5601fb93a350 (draft) [tip ] add new_3_c
186 5:5601fb93a350 (draft) [tip ] add new_3_c
187 $ hg log -r 6
187 $ hg log -r 6
188 abort: unknown revision '6'!
188 abort: unknown revision '6'!
189 [255]
189 [255]
190 $ hg log -r 4
190 $ hg log -r 4
191 abort: hidden revision '4'!
191 abort: hidden revision '4'!
192 (use --hidden to access hidden revisions)
192 (use --hidden to access hidden revisions)
193 [255]
193 [255]
194 $ hg debugrevspec 'rev(6)'
194 $ hg debugrevspec 'rev(6)'
195 $ hg debugrevspec 'rev(4)'
195 $ hg debugrevspec 'rev(4)'
196 $ hg debugrevspec 'null'
196 $ hg debugrevspec 'null'
197 -1
197 -1
198
198
199 Check that public changeset are not accounted as obsolete:
199 Check that public changeset are not accounted as obsolete:
200
200
201 $ hg --hidden phase --public 2
201 $ hg --hidden phase --public 2
202 $ hg log -G
202 $ hg log -G
203 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
203 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
204 |
204 |
205 | o 2:245bde4270cd (public) [ ] add original_c
205 | o 2:245bde4270cd (public) [ ] add original_c
206 |/
206 |/
207 o 1:7c3bad9141dc (public) [ ] add b
207 o 1:7c3bad9141dc (public) [ ] add b
208 |
208 |
209 o 0:1f0dee641bb7 (public) [ ] add a
209 o 0:1f0dee641bb7 (public) [ ] add a
210
210
211
211
212 And that bumped changeset are detected
212 And that bumped changeset are detected
213 --------------------------------------
213 --------------------------------------
214
214
215 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
215 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
216 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
216 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
217 the public changeset
217 the public changeset
218
218
219 $ hg log --hidden -r 'bumped()'
219 $ hg log --hidden -r 'bumped()'
220 5:5601fb93a350 (draft) [tip ] add new_3_c
220 5:5601fb93a350 (draft) [tip ] add new_3_c
221
221
222 And that we can't push bumped changeset
222 And that we can't push bumped changeset
223
223
224 $ hg push ../tmpa -r 0 --force #(make repo related)
224 $ hg push ../tmpa -r 0 --force #(make repo related)
225 pushing to ../tmpa
225 pushing to ../tmpa
226 searching for changes
226 searching for changes
227 warning: repository is unrelated
227 warning: repository is unrelated
228 adding changesets
228 adding changesets
229 adding manifests
229 adding manifests
230 adding file changes
230 adding file changes
231 added 1 changesets with 1 changes to 1 files (+1 heads)
231 added 1 changesets with 1 changes to 1 files (+1 heads)
232 $ hg push ../tmpa
232 $ hg push ../tmpa
233 pushing to ../tmpa
233 pushing to ../tmpa
234 searching for changes
234 searching for changes
235 abort: push includes bumped changeset: 5601fb93a350!
235 abort: push includes bumped changeset: 5601fb93a350!
236 [255]
236 [255]
237
237
238 Fixing "bumped" situation
238 Fixing "bumped" situation
239 We need to create a clone of 5 and add a special marker with a flag
239 We need to create a clone of 5 and add a special marker with a flag
240
240
241 $ hg up '5^'
241 $ hg up '5^'
242 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
242 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
243 $ hg revert -ar 5
243 $ hg revert -ar 5
244 adding new_3_c
244 adding new_3_c
245 $ hg ci -m 'add n3w_3_c'
245 $ hg ci -m 'add n3w_3_c'
246 created new head
246 created new head
247 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
247 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
248 $ hg log -r 'bumped()'
248 $ hg log -r 'bumped()'
249 $ hg log -G
249 $ hg log -G
250 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
250 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
251 |
251 |
252 | o 2:245bde4270cd (public) [ ] add original_c
252 | o 2:245bde4270cd (public) [ ] add original_c
253 |/
253 |/
254 o 1:7c3bad9141dc (public) [ ] add b
254 o 1:7c3bad9141dc (public) [ ] add b
255 |
255 |
256 o 0:1f0dee641bb7 (public) [ ] add a
256 o 0:1f0dee641bb7 (public) [ ] add a
257
257
258
258
259 $ cd ..
259 $ cd ..
260
260
261 Revision 0 is hidden
261 Revision 0 is hidden
262 --------------------
262 --------------------
263
263
264 $ hg init rev0hidden
264 $ hg init rev0hidden
265 $ cd rev0hidden
265 $ cd rev0hidden
266
266
267 $ mkcommit kill0
267 $ mkcommit kill0
268 $ hg up -q null
268 $ hg up -q null
269 $ hg debugobsolete `getid kill0`
269 $ hg debugobsolete `getid kill0`
270 $ mkcommit a
270 $ mkcommit a
271 $ mkcommit b
271 $ mkcommit b
272
272
273 Should pick the first visible revision as "repo" node
273 Should pick the first visible revision as "repo" node
274
274
275 $ hg archive ../archive-null
275 $ hg archive ../archive-null
276 $ cat ../archive-null/.hg_archival.txt
276 $ cat ../archive-null/.hg_archival.txt
277 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
277 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
278 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
278 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
279 branch: default
279 branch: default
280 latesttag: null
280 latesttag: null
281 latesttagdistance: 2
281 latesttagdistance: 2
282 changessincelatesttag: 2
282 changessincelatesttag: 2
283
283
284
284
285 $ cd ..
285 $ cd ..
286
286
287 Exchange Test
287 Exchange Test
288 ============================
288 ============================
289
289
290 Destination repo does not have any data
290 Destination repo does not have any data
291 ---------------------------------------
291 ---------------------------------------
292
292
293 Simple incoming test
293 Simple incoming test
294
294
295 $ hg init tmpc
295 $ hg init tmpc
296 $ cd tmpc
296 $ cd tmpc
297 $ hg incoming ../tmpb
297 $ hg incoming ../tmpb
298 comparing with ../tmpb
298 comparing with ../tmpb
299 0:1f0dee641bb7 (public) [ ] add a
299 0:1f0dee641bb7 (public) [ ] add a
300 1:7c3bad9141dc (public) [ ] add b
300 1:7c3bad9141dc (public) [ ] add b
301 2:245bde4270cd (public) [ ] add original_c
301 2:245bde4270cd (public) [ ] add original_c
302 6:6f9641995072 (draft) [tip ] add n3w_3_c
302 6:6f9641995072 (draft) [tip ] add n3w_3_c
303
303
304 Try to pull markers
304 Try to pull markers
305 (extinct changeset are excluded but marker are pushed)
305 (extinct changeset are excluded but marker are pushed)
306
306
307 $ hg pull ../tmpb
307 $ hg pull ../tmpb
308 pulling from ../tmpb
308 pulling from ../tmpb
309 requesting all changes
309 requesting all changes
310 adding changesets
310 adding changesets
311 adding manifests
311 adding manifests
312 adding file changes
312 adding file changes
313 added 4 changesets with 4 changes to 4 files (+1 heads)
313 added 4 changesets with 4 changes to 4 files (+1 heads)
314 5 new obsolescence markers
314 5 new obsolescence markers
315 (run 'hg heads' to see heads, 'hg merge' to merge)
315 (run 'hg heads' to see heads, 'hg merge' to merge)
316 $ hg debugobsolete
316 $ hg debugobsolete
317 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
317 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
318 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
318 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
319 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
319 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
320 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
320 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
321 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
321 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
322
322
323 Rollback//Transaction support
323 Rollback//Transaction support
324
324
325 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
325 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
326 $ hg debugobsolete
326 $ hg debugobsolete
327 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
327 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
328 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
328 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
329 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
329 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
330 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
330 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
331 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
331 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
332 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
332 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
333 $ hg rollback -n
333 $ hg rollback -n
334 repository tip rolled back to revision 3 (undo debugobsolete)
334 repository tip rolled back to revision 3 (undo debugobsolete)
335 $ hg rollback
335 $ hg rollback
336 repository tip rolled back to revision 3 (undo debugobsolete)
336 repository tip rolled back to revision 3 (undo debugobsolete)
337 $ hg debugobsolete
337 $ hg debugobsolete
338 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
338 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
339 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
339 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
340 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
340 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
341 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
341 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
342 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
342 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
343
343
344 $ cd ..
344 $ cd ..
345
345
346 Try to push markers
346 Try to push markers
347
347
348 $ hg init tmpd
348 $ hg init tmpd
349 $ hg -R tmpb push tmpd
349 $ hg -R tmpb push tmpd
350 pushing to tmpd
350 pushing to tmpd
351 searching for changes
351 searching for changes
352 adding changesets
352 adding changesets
353 adding manifests
353 adding manifests
354 adding file changes
354 adding file changes
355 added 4 changesets with 4 changes to 4 files (+1 heads)
355 added 4 changesets with 4 changes to 4 files (+1 heads)
356 5 new obsolescence markers
356 5 new obsolescence markers
357 $ hg -R tmpd debugobsolete | sort
357 $ hg -R tmpd debugobsolete | sort
358 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
358 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
359 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
359 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
360 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
360 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
361 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
361 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
362 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
362 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
363
363
364 Check obsolete keys are exchanged only if source has an obsolete store
364 Check obsolete keys are exchanged only if source has an obsolete store
365
365
366 $ hg init empty
366 $ hg init empty
367 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
367 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
368 pushing to tmpd
368 pushing to tmpd
369 listkeys phases
369 listkeys phases
370 listkeys bookmarks
370 listkeys bookmarks
371 no changes found
371 no changes found
372 listkeys phases
372 listkeys phases
373 [1]
373 [1]
374
374
375 clone support
375 clone support
376 (markers are copied and extinct changesets are included to allow hardlinks)
376 (markers are copied and extinct changesets are included to allow hardlinks)
377
377
378 $ hg clone tmpb clone-dest
378 $ hg clone tmpb clone-dest
379 updating to branch default
379 updating to branch default
380 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
380 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
381 $ hg -R clone-dest log -G --hidden
381 $ hg -R clone-dest log -G --hidden
382 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
382 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
383 |
383 |
384 | x 5:5601fb93a350 (draft) [ ] add new_3_c
384 | x 5:5601fb93a350 (draft) [ ] add new_3_c
385 |/
385 |/
386 | x 4:ca819180edb9 (draft) [ ] add new_2_c
386 | x 4:ca819180edb9 (draft) [ ] add new_2_c
387 |/
387 |/
388 | x 3:cdbce2fbb163 (draft) [ ] add new_c
388 | x 3:cdbce2fbb163 (draft) [ ] add new_c
389 |/
389 |/
390 | o 2:245bde4270cd (public) [ ] add original_c
390 | o 2:245bde4270cd (public) [ ] add original_c
391 |/
391 |/
392 o 1:7c3bad9141dc (public) [ ] add b
392 o 1:7c3bad9141dc (public) [ ] add b
393 |
393 |
394 o 0:1f0dee641bb7 (public) [ ] add a
394 o 0:1f0dee641bb7 (public) [ ] add a
395
395
396 $ hg -R clone-dest debugobsolete
396 $ hg -R clone-dest debugobsolete
397 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
397 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
398 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
398 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
399 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
399 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
400 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
400 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
401 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
401 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
402
402
403
403
404 Destination repo have existing data
404 Destination repo have existing data
405 ---------------------------------------
405 ---------------------------------------
406
406
407 On pull
407 On pull
408
408
409 $ hg init tmpe
409 $ hg init tmpe
410 $ cd tmpe
410 $ cd tmpe
411 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
411 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
412 $ hg pull ../tmpb
412 $ hg pull ../tmpb
413 pulling from ../tmpb
413 pulling from ../tmpb
414 requesting all changes
414 requesting all changes
415 adding changesets
415 adding changesets
416 adding manifests
416 adding manifests
417 adding file changes
417 adding file changes
418 added 4 changesets with 4 changes to 4 files (+1 heads)
418 added 4 changesets with 4 changes to 4 files (+1 heads)
419 5 new obsolescence markers
419 5 new obsolescence markers
420 (run 'hg heads' to see heads, 'hg merge' to merge)
420 (run 'hg heads' to see heads, 'hg merge' to merge)
421 $ hg debugobsolete
421 $ hg debugobsolete
422 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
422 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
423 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
423 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
424 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
424 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
425 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
425 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
426 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
426 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
427 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
427 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
428
428
429
429
430 On push
430 On push
431
431
432 $ hg push ../tmpc
432 $ hg push ../tmpc
433 pushing to ../tmpc
433 pushing to ../tmpc
434 searching for changes
434 searching for changes
435 no changes found
435 no changes found
436 1 new obsolescence markers
436 1 new obsolescence markers
437 [1]
437 [1]
438 $ hg -R ../tmpc debugobsolete
438 $ hg -R ../tmpc debugobsolete
439 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
439 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
440 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
440 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
441 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
441 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
442 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
442 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
443 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
443 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
444 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
444 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
445
445
446 detect outgoing obsolete and unstable
446 detect outgoing obsolete and unstable
447 ---------------------------------------
447 ---------------------------------------
448
448
449
449
450 $ hg log -G
450 $ hg log -G
451 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
451 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
452 |
452 |
453 | o 2:245bde4270cd (public) [ ] add original_c
453 | o 2:245bde4270cd (public) [ ] add original_c
454 |/
454 |/
455 o 1:7c3bad9141dc (public) [ ] add b
455 o 1:7c3bad9141dc (public) [ ] add b
456 |
456 |
457 o 0:1f0dee641bb7 (public) [ ] add a
457 o 0:1f0dee641bb7 (public) [ ] add a
458
458
459 $ hg up 'desc("n3w_3_c")'
459 $ hg up 'desc("n3w_3_c")'
460 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
460 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
461 $ mkcommit original_d
461 $ mkcommit original_d
462 $ mkcommit original_e
462 $ mkcommit original_e
463 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
463 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
464 $ hg debugobsolete | grep `getid original_d`
464 $ hg debugobsolete | grep `getid original_d`
465 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
465 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
466 $ hg log -r 'obsolete()'
466 $ hg log -r 'obsolete()'
467 4:94b33453f93b (draft) [ ] add original_d
467 4:94b33453f93b (draft) [ ] add original_d
468 $ hg log -G -r '::unstable()'
468 $ hg log -G -r '::unstable()'
469 @ 5:cda648ca50f5 (draft) [tip ] add original_e
469 @ 5:cda648ca50f5 (draft) [tip ] add original_e
470 |
470 |
471 x 4:94b33453f93b (draft) [ ] add original_d
471 x 4:94b33453f93b (draft) [ ] add original_d
472 |
472 |
473 o 3:6f9641995072 (draft) [ ] add n3w_3_c
473 o 3:6f9641995072 (draft) [ ] add n3w_3_c
474 |
474 |
475 o 1:7c3bad9141dc (public) [ ] add b
475 o 1:7c3bad9141dc (public) [ ] add b
476 |
476 |
477 o 0:1f0dee641bb7 (public) [ ] add a
477 o 0:1f0dee641bb7 (public) [ ] add a
478
478
479
479
480 refuse to push obsolete changeset
480 refuse to push obsolete changeset
481
481
482 $ hg push ../tmpc/ -r 'desc("original_d")'
482 $ hg push ../tmpc/ -r 'desc("original_d")'
483 pushing to ../tmpc/
483 pushing to ../tmpc/
484 searching for changes
484 searching for changes
485 abort: push includes obsolete changeset: 94b33453f93b!
485 abort: push includes obsolete changeset: 94b33453f93b!
486 [255]
486 [255]
487
487
488 refuse to push unstable changeset
488 refuse to push unstable changeset
489
489
490 $ hg push ../tmpc/
490 $ hg push ../tmpc/
491 pushing to ../tmpc/
491 pushing to ../tmpc/
492 searching for changes
492 searching for changes
493 abort: push includes unstable changeset: cda648ca50f5!
493 abort: push includes unstable changeset: cda648ca50f5!
494 [255]
494 [255]
495
495
496 Test that extinct changeset are properly detected
496 Test that extinct changeset are properly detected
497
497
498 $ hg log -r 'extinct()'
498 $ hg log -r 'extinct()'
499
499
500 Don't try to push extinct changeset
500 Don't try to push extinct changeset
501
501
502 $ hg init ../tmpf
502 $ hg init ../tmpf
503 $ hg out ../tmpf
503 $ hg out ../tmpf
504 comparing with ../tmpf
504 comparing with ../tmpf
505 searching for changes
505 searching for changes
506 0:1f0dee641bb7 (public) [ ] add a
506 0:1f0dee641bb7 (public) [ ] add a
507 1:7c3bad9141dc (public) [ ] add b
507 1:7c3bad9141dc (public) [ ] add b
508 2:245bde4270cd (public) [ ] add original_c
508 2:245bde4270cd (public) [ ] add original_c
509 3:6f9641995072 (draft) [ ] add n3w_3_c
509 3:6f9641995072 (draft) [ ] add n3w_3_c
510 4:94b33453f93b (draft) [ ] add original_d
510 4:94b33453f93b (draft) [ ] add original_d
511 5:cda648ca50f5 (draft) [tip ] add original_e
511 5:cda648ca50f5 (draft) [tip ] add original_e
512 $ hg push ../tmpf -f # -f because be push unstable too
512 $ hg push ../tmpf -f # -f because be push unstable too
513 pushing to ../tmpf
513 pushing to ../tmpf
514 searching for changes
514 searching for changes
515 adding changesets
515 adding changesets
516 adding manifests
516 adding manifests
517 adding file changes
517 adding file changes
518 added 6 changesets with 6 changes to 6 files (+1 heads)
518 added 6 changesets with 6 changes to 6 files (+1 heads)
519 7 new obsolescence markers
519 7 new obsolescence markers
520
520
521 no warning displayed
521 no warning displayed
522
522
523 $ hg push ../tmpf
523 $ hg push ../tmpf
524 pushing to ../tmpf
524 pushing to ../tmpf
525 searching for changes
525 searching for changes
526 no changes found
526 no changes found
527 [1]
527 [1]
528
528
529 Do not warn about new head when the new head is a successors of a remote one
529 Do not warn about new head when the new head is a successors of a remote one
530
530
531 $ hg log -G
531 $ hg log -G
532 @ 5:cda648ca50f5 (draft) [tip ] add original_e
532 @ 5:cda648ca50f5 (draft) [tip ] add original_e
533 |
533 |
534 x 4:94b33453f93b (draft) [ ] add original_d
534 x 4:94b33453f93b (draft) [ ] add original_d
535 |
535 |
536 o 3:6f9641995072 (draft) [ ] add n3w_3_c
536 o 3:6f9641995072 (draft) [ ] add n3w_3_c
537 |
537 |
538 | o 2:245bde4270cd (public) [ ] add original_c
538 | o 2:245bde4270cd (public) [ ] add original_c
539 |/
539 |/
540 o 1:7c3bad9141dc (public) [ ] add b
540 o 1:7c3bad9141dc (public) [ ] add b
541 |
541 |
542 o 0:1f0dee641bb7 (public) [ ] add a
542 o 0:1f0dee641bb7 (public) [ ] add a
543
543
544 $ hg up -q 'desc(n3w_3_c)'
544 $ hg up -q 'desc(n3w_3_c)'
545 $ mkcommit obsolete_e
545 $ mkcommit obsolete_e
546 created new head
546 created new head
547 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
547 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
548 $ hg outgoing ../tmpf # parasite hg outgoing testin
548 $ hg outgoing ../tmpf # parasite hg outgoing testin
549 comparing with ../tmpf
549 comparing with ../tmpf
550 searching for changes
550 searching for changes
551 6:3de5eca88c00 (draft) [tip ] add obsolete_e
551 6:3de5eca88c00 (draft) [tip ] add obsolete_e
552 $ hg push ../tmpf
552 $ hg push ../tmpf
553 pushing to ../tmpf
553 pushing to ../tmpf
554 searching for changes
554 searching for changes
555 adding changesets
555 adding changesets
556 adding manifests
556 adding manifests
557 adding file changes
557 adding file changes
558 added 1 changesets with 1 changes to 1 files (+1 heads)
558 added 1 changesets with 1 changes to 1 files (+1 heads)
559 1 new obsolescence markers
559 1 new obsolescence markers
560
560
561 test relevance computation
561 test relevance computation
562 ---------------------------------------
562 ---------------------------------------
563
563
564 Checking simple case of "marker relevance".
564 Checking simple case of "marker relevance".
565
565
566
566
567 Reminder of the repo situation
567 Reminder of the repo situation
568
568
569 $ hg log --hidden --graph
569 $ hg log --hidden --graph
570 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
570 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
571 |
571 |
572 | x 5:cda648ca50f5 (draft) [ ] add original_e
572 | x 5:cda648ca50f5 (draft) [ ] add original_e
573 | |
573 | |
574 | x 4:94b33453f93b (draft) [ ] add original_d
574 | x 4:94b33453f93b (draft) [ ] add original_d
575 |/
575 |/
576 o 3:6f9641995072 (draft) [ ] add n3w_3_c
576 o 3:6f9641995072 (draft) [ ] add n3w_3_c
577 |
577 |
578 | o 2:245bde4270cd (public) [ ] add original_c
578 | o 2:245bde4270cd (public) [ ] add original_c
579 |/
579 |/
580 o 1:7c3bad9141dc (public) [ ] add b
580 o 1:7c3bad9141dc (public) [ ] add b
581 |
581 |
582 o 0:1f0dee641bb7 (public) [ ] add a
582 o 0:1f0dee641bb7 (public) [ ] add a
583
583
584
584
585 List of all markers
585 List of all markers
586
586
587 $ hg debugobsolete
587 $ hg debugobsolete
588 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
588 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
589 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
589 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
590 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
590 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
591 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
591 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
592 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
592 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
593 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
593 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
594 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
594 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
595 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
595 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
596
596
597 List of changesets with no chain
597 List of changesets with no chain
598
598
599 $ hg debugobsolete --hidden --rev ::2
599 $ hg debugobsolete --hidden --rev ::2
600
600
601 List of changesets that are included on marker chain
601 List of changesets that are included on marker chain
602
602
603 $ hg debugobsolete --hidden --rev 6
603 $ hg debugobsolete --hidden --rev 6
604 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
604 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
605
605
606 List of changesets with a longer chain, (including a pruned children)
606 List of changesets with a longer chain, (including a pruned children)
607
607
608 $ hg debugobsolete --hidden --rev 3
608 $ hg debugobsolete --hidden --rev 3
609 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
609 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
610 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
610 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
611 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
611 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
612 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
612 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
613 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
613 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
614 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
614 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
615 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
615 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
616
616
617 List of both
617 List of both
618
618
619 $ hg debugobsolete --hidden --rev 3::6
619 $ hg debugobsolete --hidden --rev 3::6
620 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
620 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
621 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
621 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
622 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
622 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
623 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
623 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
624 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
624 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
625 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
625 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
626 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
626 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
627 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
627 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
628
628
629 #if serve
629 #if serve
630
630
631 Test the debug output for exchange
631 Test the debug output for exchange
632 ----------------------------------
632 ----------------------------------
633
633
634 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' --config 'experimental.bundle2-exp=True'
634 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' --config 'experimental.bundle2-exp=True'
635 pulling from ../tmpb
635 pulling from ../tmpb
636 searching for changes
636 searching for changes
637 no changes found
637 no changes found
638 obsmarker-exchange: 346 bytes received
638 obsmarker-exchange: 346 bytes received
639
639
640 check hgweb does not explode
640 check hgweb does not explode
641 ====================================
641 ====================================
642
642
643 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
643 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
644 adding changesets
644 adding changesets
645 adding manifests
645 adding manifests
646 adding file changes
646 adding file changes
647 added 62 changesets with 63 changes to 9 files (+60 heads)
647 added 62 changesets with 63 changes to 9 files (+60 heads)
648 (run 'hg heads .' to see heads, 'hg merge' to merge)
648 (run 'hg heads .' to see heads, 'hg merge' to merge)
649 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
649 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
650 > do
650 > do
651 > hg debugobsolete $node
651 > hg debugobsolete $node
652 > done
652 > done
653 $ hg up tip
653 $ hg up tip
654 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
654 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
655
655
656 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
656 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
657 $ cat hg.pid >> $DAEMON_PIDS
657 $ cat hg.pid >> $DAEMON_PIDS
658
658
659 check changelog view
659 check changelog view
660
660
661 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
661 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
662 200 Script output follows
662 200 Script output follows
663
663
664 check graph view
664 check graph view
665
665
666 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
666 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
667 200 Script output follows
667 200 Script output follows
668
668
669 check filelog view
669 check filelog view
670
670
671 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
671 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
672 200 Script output follows
672 200 Script output follows
673
673
674 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
674 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
675 200 Script output follows
675 200 Script output follows
676 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
676 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
677 404 Not Found
677 404 Not Found
678 [1]
678 [1]
679
679
680 check that web.view config option:
680 check that web.view config option:
681
681
682 $ killdaemons.py hg.pid
682 $ killdaemons.py hg.pid
683 $ cat >> .hg/hgrc << EOF
683 $ cat >> .hg/hgrc << EOF
684 > [web]
684 > [web]
685 > view=all
685 > view=all
686 > EOF
686 > EOF
687 $ wait
687 $ wait
688 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
688 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
689 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
689 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
690 200 Script output follows
690 200 Script output follows
691 $ killdaemons.py hg.pid
691 $ killdaemons.py hg.pid
692
692
693 Checking _enable=False warning if obsolete marker exists
693 Checking _enable=False warning if obsolete marker exists
694
694
695 $ echo '[experimental]' >> $HGRCPATH
695 $ echo '[experimental]' >> $HGRCPATH
696 $ echo "evolution=" >> $HGRCPATH
696 $ echo "evolution=" >> $HGRCPATH
697 $ hg log -r tip
697 $ hg log -r tip
698 obsolete feature not enabled but 68 markers found!
698 obsolete feature not enabled but 68 markers found!
699 68:c15e9edfca13 (draft) [tip ] add celestine
699 68:c15e9edfca13 (draft) [tip ] add celestine
700
700
701 reenable for later test
701 reenable for later test
702
702
703 $ echo '[experimental]' >> $HGRCPATH
703 $ echo '[experimental]' >> $HGRCPATH
704 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
704 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
705
705
706 #endif
706 #endif
707
707
708 Test incoming/outcoming with changesets obsoleted remotely, known locally
708 Test incoming/outcoming with changesets obsoleted remotely, known locally
709 ===============================================================================
709 ===============================================================================
710
710
711 This test issue 3805
711 This test issue 3805
712
712
713 $ hg init repo-issue3805
713 $ hg init repo-issue3805
714 $ cd repo-issue3805
714 $ cd repo-issue3805
715 $ echo "base" > base
715 $ echo "base" > base
716 $ hg ci -Am "base"
716 $ hg ci -Am "base"
717 adding base
717 adding base
718 $ echo "foo" > foo
718 $ echo "foo" > foo
719 $ hg ci -Am "A"
719 $ hg ci -Am "A"
720 adding foo
720 adding foo
721 $ hg clone . ../other-issue3805
721 $ hg clone . ../other-issue3805
722 updating to branch default
722 updating to branch default
723 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
723 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
724 $ echo "bar" >> foo
724 $ echo "bar" >> foo
725 $ hg ci --amend
725 $ hg ci --amend
726 $ cd ../other-issue3805
726 $ cd ../other-issue3805
727 $ hg log -G
727 $ hg log -G
728 @ 1:29f0c6921ddd (draft) [tip ] A
728 @ 1:29f0c6921ddd (draft) [tip ] A
729 |
729 |
730 o 0:d20a80d4def3 (draft) [ ] base
730 o 0:d20a80d4def3 (draft) [ ] base
731
731
732 $ hg log -G -R ../repo-issue3805
732 $ hg log -G -R ../repo-issue3805
733 @ 3:323a9c3ddd91 (draft) [tip ] A
733 @ 3:323a9c3ddd91 (draft) [tip ] A
734 |
734 |
735 o 0:d20a80d4def3 (draft) [ ] base
735 o 0:d20a80d4def3 (draft) [ ] base
736
736
737 $ hg incoming
737 $ hg incoming
738 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
738 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
739 searching for changes
739 searching for changes
740 3:323a9c3ddd91 (draft) [tip ] A
740 3:323a9c3ddd91 (draft) [tip ] A
741 $ hg incoming --bundle ../issue3805.hg
741 $ hg incoming --bundle ../issue3805.hg
742 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
742 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
743 searching for changes
743 searching for changes
744 3:323a9c3ddd91 (draft) [tip ] A
744 3:323a9c3ddd91 (draft) [tip ] A
745 $ hg outgoing
745 $ hg outgoing
746 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
746 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
747 searching for changes
747 searching for changes
748 no changes found
748 1:29f0c6921ddd (draft) [tip ] A
749 [1]
750
749
751 #if serve
750 #if serve
752
751
753 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
752 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
754 $ cat hg.pid >> $DAEMON_PIDS
753 $ cat hg.pid >> $DAEMON_PIDS
755
754
756 $ hg incoming http://localhost:$HGPORT
755 $ hg incoming http://localhost:$HGPORT
757 comparing with http://localhost:$HGPORT/
756 comparing with http://localhost:$HGPORT/
758 searching for changes
757 searching for changes
759 2:323a9c3ddd91 (draft) [tip ] A
758 2:323a9c3ddd91 (draft) [tip ] A
760 $ hg outgoing http://localhost:$HGPORT
759 $ hg outgoing http://localhost:$HGPORT
761 comparing with http://localhost:$HGPORT/
760 comparing with http://localhost:$HGPORT/
762 searching for changes
761 searching for changes
763 no changes found
762 1:29f0c6921ddd (draft) [tip ] A
764 [1]
765
763
766 $ killdaemons.py
764 $ killdaemons.py
767
765
768 #endif
766 #endif
769
767
770 This test issue 3814
768 This test issue 3814
771
769
772 (nothing to push but locally hidden changeset)
770 (nothing to push but locally hidden changeset)
773
771
774 $ cd ..
772 $ cd ..
775 $ hg init repo-issue3814
773 $ hg init repo-issue3814
776 $ cd repo-issue3805
774 $ cd repo-issue3805
777 $ hg push -r 323a9c3ddd91 ../repo-issue3814
775 $ hg push -r 323a9c3ddd91 ../repo-issue3814
778 pushing to ../repo-issue3814
776 pushing to ../repo-issue3814
779 searching for changes
777 searching for changes
780 adding changesets
778 adding changesets
781 adding manifests
779 adding manifests
782 adding file changes
780 adding file changes
783 added 2 changesets with 2 changes to 2 files
781 added 2 changesets with 2 changes to 2 files
784 2 new obsolescence markers
782 2 new obsolescence markers
785 $ hg out ../repo-issue3814
783 $ hg out ../repo-issue3814
786 comparing with ../repo-issue3814
784 comparing with ../repo-issue3814
787 searching for changes
785 searching for changes
788 no changes found
786 no changes found
789 [1]
787 [1]
790
788
791 Test that a local tag blocks a changeset from being hidden
789 Test that a local tag blocks a changeset from being hidden
792
790
793 $ hg tag -l visible -r 1 --hidden
791 $ hg tag -l visible -r 1 --hidden
794 $ hg log -G
792 $ hg log -G
795 @ 3:323a9c3ddd91 (draft) [tip ] A
793 @ 3:323a9c3ddd91 (draft) [tip ] A
796 |
794 |
797 | x 1:29f0c6921ddd (draft) [visible ] A
795 | x 1:29f0c6921ddd (draft) [visible ] A
798 |/
796 |/
799 o 0:d20a80d4def3 (draft) [ ] base
797 o 0:d20a80d4def3 (draft) [ ] base
800
798
801 Test that removing a local tag does not cause some commands to fail
799 Test that removing a local tag does not cause some commands to fail
802
800
803 $ hg tag -l -r tip tiptag
801 $ hg tag -l -r tip tiptag
804 $ hg tags
802 $ hg tags
805 tiptag 3:323a9c3ddd91
803 tiptag 3:323a9c3ddd91
806 tip 3:323a9c3ddd91
804 tip 3:323a9c3ddd91
807 visible 1:29f0c6921ddd
805 visible 1:29f0c6921ddd
808 $ hg --config extensions.strip= strip -r tip --no-backup
806 $ hg --config extensions.strip= strip -r tip --no-backup
809 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
807 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
810 $ hg tags
808 $ hg tags
811 visible 1:29f0c6921ddd
809 visible 1:29f0c6921ddd
812 tip 1:29f0c6921ddd
810 tip 1:29f0c6921ddd
813
811
814 Test bundle overlay onto hidden revision
812 Test bundle overlay onto hidden revision
815
813
816 $ cd ..
814 $ cd ..
817 $ hg init repo-bundleoverlay
815 $ hg init repo-bundleoverlay
818 $ cd repo-bundleoverlay
816 $ cd repo-bundleoverlay
819 $ echo "A" > foo
817 $ echo "A" > foo
820 $ hg ci -Am "A"
818 $ hg ci -Am "A"
821 adding foo
819 adding foo
822 $ echo "B" >> foo
820 $ echo "B" >> foo
823 $ hg ci -m "B"
821 $ hg ci -m "B"
824 $ hg up 0
822 $ hg up 0
825 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
823 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
826 $ echo "C" >> foo
824 $ echo "C" >> foo
827 $ hg ci -m "C"
825 $ hg ci -m "C"
828 created new head
826 created new head
829 $ hg log -G
827 $ hg log -G
830 @ 2:c186d7714947 (draft) [tip ] C
828 @ 2:c186d7714947 (draft) [tip ] C
831 |
829 |
832 | o 1:44526ebb0f98 (draft) [ ] B
830 | o 1:44526ebb0f98 (draft) [ ] B
833 |/
831 |/
834 o 0:4b34ecfb0d56 (draft) [ ] A
832 o 0:4b34ecfb0d56 (draft) [ ] A
835
833
836
834
837 $ hg clone -r1 . ../other-bundleoverlay
835 $ hg clone -r1 . ../other-bundleoverlay
838 adding changesets
836 adding changesets
839 adding manifests
837 adding manifests
840 adding file changes
838 adding file changes
841 added 2 changesets with 2 changes to 1 files
839 added 2 changesets with 2 changes to 1 files
842 updating to branch default
840 updating to branch default
843 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
841 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
844 $ cd ../other-bundleoverlay
842 $ cd ../other-bundleoverlay
845 $ echo "B+" >> foo
843 $ echo "B+" >> foo
846 $ hg ci --amend -m "B+"
844 $ hg ci --amend -m "B+"
847 $ hg log -G --hidden
845 $ hg log -G --hidden
848 @ 3:b7d587542d40 (draft) [tip ] B+
846 @ 3:b7d587542d40 (draft) [tip ] B+
849 |
847 |
850 | x 2:eb95e9297e18 (draft) [ ] temporary amend commit for 44526ebb0f98
848 | x 2:eb95e9297e18 (draft) [ ] temporary amend commit for 44526ebb0f98
851 | |
849 | |
852 | x 1:44526ebb0f98 (draft) [ ] B
850 | x 1:44526ebb0f98 (draft) [ ] B
853 |/
851 |/
854 o 0:4b34ecfb0d56 (draft) [ ] A
852 o 0:4b34ecfb0d56 (draft) [ ] A
855
853
856
854
857 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
855 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
858 comparing with ../repo-bundleoverlay
856 comparing with ../repo-bundleoverlay
859 searching for changes
857 searching for changes
860 1:44526ebb0f98 (draft) [ ] B
858 1:44526ebb0f98 (draft) [ ] B
861 2:c186d7714947 (draft) [tip ] C
859 2:c186d7714947 (draft) [tip ] C
862 $ hg log -G -R ../bundleoverlay.hg
860 $ hg log -G -R ../bundleoverlay.hg
863 o 4:c186d7714947 (draft) [tip ] C
861 o 4:c186d7714947 (draft) [tip ] C
864 |
862 |
865 | @ 3:b7d587542d40 (draft) [ ] B+
863 | @ 3:b7d587542d40 (draft) [ ] B+
866 |/
864 |/
867 o 0:4b34ecfb0d56 (draft) [ ] A
865 o 0:4b34ecfb0d56 (draft) [ ] A
868
866
869
867
870 #if serve
868 #if serve
871
869
872 Test issue 4506
870 Test issue 4506
873
871
874 $ cd ..
872 $ cd ..
875 $ hg init repo-issue4506
873 $ hg init repo-issue4506
876 $ cd repo-issue4506
874 $ cd repo-issue4506
877 $ echo "0" > foo
875 $ echo "0" > foo
878 $ hg add foo
876 $ hg add foo
879 $ hg ci -m "content-0"
877 $ hg ci -m "content-0"
880
878
881 $ hg up null
879 $ hg up null
882 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
880 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
883 $ echo "1" > bar
881 $ echo "1" > bar
884 $ hg add bar
882 $ hg add bar
885 $ hg ci -m "content-1"
883 $ hg ci -m "content-1"
886 created new head
884 created new head
887 $ hg up 0
885 $ hg up 0
888 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
886 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
889 $ hg graft 1
887 $ hg graft 1
890 grafting 1:1c9eddb02162 "content-1" (tip)
888 grafting 1:1c9eddb02162 "content-1" (tip)
891
889
892 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
890 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
893
891
894 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
892 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
895 $ cat hg.pid >> $DAEMON_PIDS
893 $ cat hg.pid >> $DAEMON_PIDS
896
894
897 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
895 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
898 404 Not Found
896 404 Not Found
899 [1]
897 [1]
900 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
898 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
901 200 Script output follows
899 200 Script output follows
902 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
900 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
903 200 Script output follows
901 200 Script output follows
904
902
905 $ killdaemons.py
903 $ killdaemons.py
906
904
907 #endif
905 #endif
908
906
909 Test heads computation on pending index changes with obsolescence markers
907 Test heads computation on pending index changes with obsolescence markers
910 $ cd ..
908 $ cd ..
911 $ cat >$TESTTMP/test_extension.py << EOF
909 $ cat >$TESTTMP/test_extension.py << EOF
912 > from mercurial import cmdutil
910 > from mercurial import cmdutil
913 > from mercurial.i18n import _
911 > from mercurial.i18n import _
914 >
912 >
915 > cmdtable = {}
913 > cmdtable = {}
916 > command = cmdutil.command(cmdtable)
914 > command = cmdutil.command(cmdtable)
917 > @command("amendtransient",[], _('hg amendtransient [rev]'))
915 > @command("amendtransient",[], _('hg amendtransient [rev]'))
918 > def amend(ui, repo, *pats, **opts):
916 > def amend(ui, repo, *pats, **opts):
919 > def commitfunc(ui, repo, message, match, opts):
917 > def commitfunc(ui, repo, message, match, opts):
920 > return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
918 > return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
921 > opts['message'] = 'Test'
919 > opts['message'] = 'Test'
922 > opts['logfile'] = None
920 > opts['logfile'] = None
923 > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts)
921 > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts)
924 > print repo.changelog.headrevs()
922 > print repo.changelog.headrevs()
925 > EOF
923 > EOF
926 $ cat >> $HGRCPATH << EOF
924 $ cat >> $HGRCPATH << EOF
927 > [extensions]
925 > [extensions]
928 > testextension=$TESTTMP/test_extension.py
926 > testextension=$TESTTMP/test_extension.py
929 > EOF
927 > EOF
930 $ hg init repo-issue-nativerevs-pending-changes
928 $ hg init repo-issue-nativerevs-pending-changes
931 $ cd repo-issue-nativerevs-pending-changes
929 $ cd repo-issue-nativerevs-pending-changes
932 $ mkcommit a
930 $ mkcommit a
933 $ mkcommit b
931 $ mkcommit b
934 $ hg up ".^"
932 $ hg up ".^"
935 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
933 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
936 $ echo aa > a
934 $ echo aa > a
937 $ hg amendtransient
935 $ hg amendtransient
938 [1, 3]
936 [1, 3]
939
937
940 Test cache consistency for the visible filter
938 Test cache consistency for the visible filter
941 1) We want to make sure that the cached filtered revs are invalidated when
939 1) We want to make sure that the cached filtered revs are invalidated when
942 bookmarks change
940 bookmarks change
943 $ cd ..
941 $ cd ..
944 $ cat >$TESTTMP/test_extension.py << EOF
942 $ cat >$TESTTMP/test_extension.py << EOF
945 > from mercurial import cmdutil, extensions, bookmarks, repoview
943 > from mercurial import cmdutil, extensions, bookmarks, repoview
946 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
944 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
947 > repo = bkmstoreinst._repo
945 > repo = bkmstoreinst._repo
948 > ret = orig(bkmstoreinst, *args, **kwargs)
946 > ret = orig(bkmstoreinst, *args, **kwargs)
949 > hidden1 = repoview.computehidden(repo)
947 > hidden1 = repoview.computehidden(repo)
950 > hidden = repoview.filterrevs(repo, 'visible')
948 > hidden = repoview.filterrevs(repo, 'visible')
951 > if sorted(hidden1) != sorted(hidden):
949 > if sorted(hidden1) != sorted(hidden):
952 > print "cache inconsistency"
950 > print "cache inconsistency"
953 > return ret
951 > return ret
954 > def extsetup(ui):
952 > def extsetup(ui):
955 > extensions.wrapfunction(bookmarks.bmstore, 'write', _bookmarkchanged)
953 > extensions.wrapfunction(bookmarks.bmstore, 'write', _bookmarkchanged)
956 > EOF
954 > EOF
957
955
958 $ hg init repo-cache-inconsistency
956 $ hg init repo-cache-inconsistency
959 $ cd repo-issue-nativerevs-pending-changes
957 $ cd repo-issue-nativerevs-pending-changes
960 $ mkcommit a
958 $ mkcommit a
961 a already tracked!
959 a already tracked!
962 $ mkcommit b
960 $ mkcommit b
963 $ hg id
961 $ hg id
964 13bedc178fce tip
962 13bedc178fce tip
965 $ echo "hello" > b
963 $ echo "hello" > b
966 $ hg commit --amend -m "message"
964 $ hg commit --amend -m "message"
967 $ hg book bookb -r 13bedc178fce --hidden
965 $ hg book bookb -r 13bedc178fce --hidden
968 $ hg log -r 13bedc178fce
966 $ hg log -r 13bedc178fce
969 5:13bedc178fce (draft) [ bookb] add b
967 5:13bedc178fce (draft) [ bookb] add b
970 $ hg book -d bookb
968 $ hg book -d bookb
971 $ hg log -r 13bedc178fce
969 $ hg log -r 13bedc178fce
972 abort: hidden revision '13bedc178fce'!
970 abort: hidden revision '13bedc178fce'!
973 (use --hidden to access hidden revisions)
971 (use --hidden to access hidden revisions)
974 [255]
972 [255]
975
973
976
974
977
975
General Comments 0
You need to be logged in to leave comments. Login now