# localrepo.py - read/write repository class for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import hex, nullid, short
from i18n import _
import urllib
import peer, changegroup, subrepo, pushkey, obsolete, repoview
import changelog, dirstate, filelog, manifest, context, bookmarks, phases
import lock as lockmod
import transaction, store, encoding, exchange, bundle2
import scmutil, util, extensions, hook, error, revset
import match as matchmod
import merge as mergemod
import tags as tagsmod
from lock import release
import weakref, errno, os, time, inspect
import branchmap, pathutil
import namespaces
# Short local aliases for the cache decorators used throughout this module.
propertycache = util.propertycache
filecache = scmutil.filecache
24
24
class repofilecache(filecache):
    """All filecache usage on repo are done for logic that should be unfiltered
    """

    # Each descriptor hook redirects to the unfiltered repo so that the
    # cached value lives on (and is invalidated against) the real repo,
    # never a repoview proxy.
    def __get__(self, repo, type=None):
        return super(repofilecache, self).__get__(repo.unfiltered(), type)
    def __set__(self, repo, value):
        return super(repofilecache, self).__set__(repo.unfiltered(), value)
    def __delete__(self, repo):
        return super(repofilecache, self).__delete__(repo.unfiltered())
35
35
class storecache(repofilecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        # Resolve cache file names inside .hg/store rather than .hg/.
        return obj.sjoin(fname)
40
40
class unfilteredpropertycache(propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            # Accessed on the unfiltered repo itself: compute and cache there.
            return super(unfilteredpropertycache, self).__get__(unfi)
        # Accessed through a filtered view: delegate to the unfiltered repo's
        # attribute so only one cached copy ever exists.
        return getattr(unfi, self.name)
49
49
class filteredpropertycache(propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        # Store directly on the (possibly filtered) instance, bypassing the
        # descriptor so later reads hit the instance dict.
        object.__setattr__(obj, self.name, value)
55
55
56
56
def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>

    Only inspects the unfiltered repo's instance dict; never triggers the
    (potentially expensive) property computation itself.
    """
    return name in vars(repo.unfiltered())
60
60
def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version

    The wrapped method receives ``repo.unfiltered()`` in place of the repo
    it was invoked on; all other arguments pass through untouched.
    """
    def wrapper(repo, *args, **kwargs):
        return orig(repo.unfiltered(), *args, **kwargs)
    return wrapper
66
66
# Wire-protocol capabilities advertised by local peers.  Legacy peers add
# 'changegroupsubset' on top of the modern set for old-protocol tests.
moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
                  'unbundle'))
legacycaps = moderncaps.union(set(['changegroupsubset']))
70
70
71 class localpeer(peer.peerrepository):
71 class localpeer(peer.peerrepository):
72 '''peer for a local repo; reflects only the most recent API'''
72 '''peer for a local repo; reflects only the most recent API'''
73
73
74 def __init__(self, repo, caps=moderncaps):
74 def __init__(self, repo, caps=moderncaps):
75 peer.peerrepository.__init__(self)
75 peer.peerrepository.__init__(self)
76 self._repo = repo.filtered('served')
76 self._repo = repo.filtered('served')
77 self.ui = repo.ui
77 self.ui = repo.ui
78 self._caps = repo._restrictcapabilities(caps)
78 self._caps = repo._restrictcapabilities(caps)
79 self.requirements = repo.requirements
79 self.requirements = repo.requirements
80 self.supportedformats = repo.supportedformats
80 self.supportedformats = repo.supportedformats
81
81
82 def close(self):
82 def close(self):
83 self._repo.close()
83 self._repo.close()
84
84
85 def _capabilities(self):
85 def _capabilities(self):
86 return self._caps
86 return self._caps
87
87
88 def local(self):
88 def local(self):
89 return self._repo
89 return self._repo
90
90
91 def canpush(self):
91 def canpush(self):
92 return True
92 return True
93
93
94 def url(self):
94 def url(self):
95 return self._repo.url()
95 return self._repo.url()
96
96
97 def lookup(self, key):
97 def lookup(self, key):
98 return self._repo.lookup(key)
98 return self._repo.lookup(key)
99
99
100 def branchmap(self):
100 def branchmap(self):
101 return self._repo.branchmap()
101 return self._repo.branchmap()
102
102
103 def heads(self):
103 def heads(self):
104 return self._repo.heads()
104 return self._repo.heads()
105
105
106 def known(self, nodes):
106 def known(self, nodes):
107 return self._repo.known(nodes)
107 return self._repo.known(nodes)
108
108
109 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
109 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
110 format='HG10', **kwargs):
110 format='HG10', **kwargs):
111 cg = exchange.getbundle(self._repo, source, heads=heads,
111 cg = exchange.getbundle(self._repo, source, heads=heads,
112 common=common, bundlecaps=bundlecaps, **kwargs)
112 common=common, bundlecaps=bundlecaps, **kwargs)
113 if bundlecaps is not None and 'HG2Y' in bundlecaps:
113 if bundlecaps is not None and 'HG2Y' in bundlecaps:
114 # When requesting a bundle2, getbundle returns a stream to make the
114 # When requesting a bundle2, getbundle returns a stream to make the
115 # wire level function happier. We need to build a proper object
115 # wire level function happier. We need to build a proper object
116 # from it in local peer.
116 # from it in local peer.
117 cg = bundle2.unbundle20(self.ui, cg)
117 cg = bundle2.unbundle20(self.ui, cg)
118 return cg
118 return cg
119
119
120 # TODO We might want to move the next two calls into legacypeer and add
120 # TODO We might want to move the next two calls into legacypeer and add
121 # unbundle instead.
121 # unbundle instead.
122
122
123 def unbundle(self, cg, heads, url):
123 def unbundle(self, cg, heads, url):
124 """apply a bundle on a repo
124 """apply a bundle on a repo
125
125
126 This function handles the repo locking itself."""
126 This function handles the repo locking itself."""
127 try:
127 try:
128 cg = exchange.readbundle(self.ui, cg, None)
128 cg = exchange.readbundle(self.ui, cg, None)
129 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
129 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
130 if util.safehasattr(ret, 'getchunks'):
130 if util.safehasattr(ret, 'getchunks'):
131 # This is a bundle20 object, turn it into an unbundler.
131 # This is a bundle20 object, turn it into an unbundler.
132 # This little dance should be dropped eventually when the API
132 # This little dance should be dropped eventually when the API
133 # is finally improved.
133 # is finally improved.
134 stream = util.chunkbuffer(ret.getchunks())
134 stream = util.chunkbuffer(ret.getchunks())
135 ret = bundle2.unbundle20(self.ui, stream)
135 ret = bundle2.unbundle20(self.ui, stream)
136 return ret
136 return ret
137 except error.PushRaced, exc:
137 except error.PushRaced, exc:
138 raise error.ResponseError(_('push failed:'), str(exc))
138 raise error.ResponseError(_('push failed:'), str(exc))
139
139
140 def lock(self):
140 def lock(self):
141 return self._repo.lock()
141 return self._repo.lock()
142
142
143 def addchangegroup(self, cg, source, url):
143 def addchangegroup(self, cg, source, url):
144 return changegroup.addchangegroup(self._repo, cg, source, url)
144 return changegroup.addchangegroup(self._repo, cg, source, url)
145
145
146 def pushkey(self, namespace, key, old, new):
146 def pushkey(self, namespace, key, old, new):
147 return self._repo.pushkey(namespace, key, old, new)
147 return self._repo.pushkey(namespace, key, old, new)
148
148
149 def listkeys(self, namespace):
149 def listkeys(self, namespace):
150 return self._repo.listkeys(namespace)
150 return self._repo.listkeys(namespace)
151
151
152 def debugwireargs(self, one, two, three=None, four=None, five=None):
152 def debugwireargs(self, one, two, three=None, four=None, five=None):
153 '''used to test argument passing over the wire'''
153 '''used to test argument passing over the wire'''
154 return "%s %s %s %s %s" % (one, two, three, four, five)
154 return "%s %s %s %s %s" % (one, two, three, four, five)
155
155
class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        localpeer.__init__(self, repo, caps=legacycaps)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def between(self, pairs):
        return self._repo.between(pairs)

    def changegroup(self, basenodes, source):
        return changegroup.changegroup(self._repo, basenodes, source)

    def changegroupsubset(self, bases, heads, source):
        return changegroup.changegroupsubset(self._repo, bases, heads, source)
174
174
class localrepository(object):

    # repository format features this class can read/write
    supportedformats = set(('revlogv1', 'generaldelta'))
    _basesupported = supportedformats | set(('store', 'fncache', 'shared',
                                             'dotencode'))
    # requirements that the store opener must be told about
    openerreqs = set(('revlogv1', 'generaldelta'))
    requirements = ['revlogv1']
    # None on an unfiltered repo; repoview subclasses override this
    filtername = None

    # a list of (ui, featureset) functions.
    # only functions defined in module of enabled extensions are invoked
    featuresetupfuncs = set()
187
187
188 def _baserequirements(self, create):
188 def _baserequirements(self, create):
189 return self.requirements[:]
189 return self.requirements[:]
190
190
191 def __init__(self, baseui, path=None, create=False):
191 def __init__(self, baseui, path=None, create=False):
192 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
192 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
193 self.wopener = self.wvfs
193 self.wopener = self.wvfs
194 self.root = self.wvfs.base
194 self.root = self.wvfs.base
195 self.path = self.wvfs.join(".hg")
195 self.path = self.wvfs.join(".hg")
196 self.origroot = path
196 self.origroot = path
197 self.auditor = pathutil.pathauditor(self.root, self._checknested)
197 self.auditor = pathutil.pathauditor(self.root, self._checknested)
198 self.vfs = scmutil.vfs(self.path)
198 self.vfs = scmutil.vfs(self.path)
199 self.opener = self.vfs
199 self.opener = self.vfs
200 self.baseui = baseui
200 self.baseui = baseui
201 self.ui = baseui.copy()
201 self.ui = baseui.copy()
202 self.ui.copy = baseui.copy # prevent copying repo configuration
202 self.ui.copy = baseui.copy # prevent copying repo configuration
203 # A list of callback to shape the phase if no data were found.
203 # A list of callback to shape the phase if no data were found.
204 # Callback are in the form: func(repo, roots) --> processed root.
204 # Callback are in the form: func(repo, roots) --> processed root.
205 # This list it to be filled by extension during repo setup
205 # This list it to be filled by extension during repo setup
206 self._phasedefaults = []
206 self._phasedefaults = []
207 try:
207 try:
208 self.ui.readconfig(self.join("hgrc"), self.root)
208 self.ui.readconfig(self.join("hgrc"), self.root)
209 extensions.loadall(self.ui)
209 extensions.loadall(self.ui)
210 except IOError:
210 except IOError:
211 pass
211 pass
212
212
213 if self.featuresetupfuncs:
213 if self.featuresetupfuncs:
214 self.supported = set(self._basesupported) # use private copy
214 self.supported = set(self._basesupported) # use private copy
215 extmods = set(m.__name__ for n, m
215 extmods = set(m.__name__ for n, m
216 in extensions.extensions(self.ui))
216 in extensions.extensions(self.ui))
217 for setupfunc in self.featuresetupfuncs:
217 for setupfunc in self.featuresetupfuncs:
218 if setupfunc.__module__ in extmods:
218 if setupfunc.__module__ in extmods:
219 setupfunc(self.ui, self.supported)
219 setupfunc(self.ui, self.supported)
220 else:
220 else:
221 self.supported = self._basesupported
221 self.supported = self._basesupported
222
222
223 if not self.vfs.isdir():
223 if not self.vfs.isdir():
224 if create:
224 if create:
225 if not self.wvfs.exists():
225 if not self.wvfs.exists():
226 self.wvfs.makedirs()
226 self.wvfs.makedirs()
227 self.vfs.makedir(notindexed=True)
227 self.vfs.makedir(notindexed=True)
228 requirements = self._baserequirements(create)
228 requirements = self._baserequirements(create)
229 if self.ui.configbool('format', 'usestore', True):
229 if self.ui.configbool('format', 'usestore', True):
230 self.vfs.mkdir("store")
230 self.vfs.mkdir("store")
231 requirements.append("store")
231 requirements.append("store")
232 if self.ui.configbool('format', 'usefncache', True):
232 if self.ui.configbool('format', 'usefncache', True):
233 requirements.append("fncache")
233 requirements.append("fncache")
234 if self.ui.configbool('format', 'dotencode', True):
234 if self.ui.configbool('format', 'dotencode', True):
235 requirements.append('dotencode')
235 requirements.append('dotencode')
236 # create an invalid changelog
236 # create an invalid changelog
237 self.vfs.append(
237 self.vfs.append(
238 "00changelog.i",
238 "00changelog.i",
239 '\0\0\0\2' # represents revlogv2
239 '\0\0\0\2' # represents revlogv2
240 ' dummy changelog to prevent using the old repo layout'
240 ' dummy changelog to prevent using the old repo layout'
241 )
241 )
242 if self.ui.configbool('format', 'generaldelta', False):
242 if self.ui.configbool('format', 'generaldelta', False):
243 requirements.append("generaldelta")
243 requirements.append("generaldelta")
244 requirements = set(requirements)
244 requirements = set(requirements)
245 else:
245 else:
246 raise error.RepoError(_("repository %s not found") % path)
246 raise error.RepoError(_("repository %s not found") % path)
247 elif create:
247 elif create:
248 raise error.RepoError(_("repository %s already exists") % path)
248 raise error.RepoError(_("repository %s already exists") % path)
249 else:
249 else:
250 try:
250 try:
251 requirements = scmutil.readrequires(self.vfs, self.supported)
251 requirements = scmutil.readrequires(self.vfs, self.supported)
252 except IOError, inst:
252 except IOError, inst:
253 if inst.errno != errno.ENOENT:
253 if inst.errno != errno.ENOENT:
254 raise
254 raise
255 requirements = set()
255 requirements = set()
256
256
257 self.sharedpath = self.path
257 self.sharedpath = self.path
258 try:
258 try:
259 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
259 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
260 realpath=True)
260 realpath=True)
261 s = vfs.base
261 s = vfs.base
262 if not vfs.exists():
262 if not vfs.exists():
263 raise error.RepoError(
263 raise error.RepoError(
264 _('.hg/sharedpath points to nonexistent directory %s') % s)
264 _('.hg/sharedpath points to nonexistent directory %s') % s)
265 self.sharedpath = s
265 self.sharedpath = s
266 except IOError, inst:
266 except IOError, inst:
267 if inst.errno != errno.ENOENT:
267 if inst.errno != errno.ENOENT:
268 raise
268 raise
269
269
270 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
270 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
271 self.spath = self.store.path
271 self.spath = self.store.path
272 self.svfs = self.store.vfs
272 self.svfs = self.store.vfs
273 self.sopener = self.svfs
273 self.sopener = self.svfs
274 self.sjoin = self.store.join
274 self.sjoin = self.store.join
275 self.vfs.createmode = self.store.createmode
275 self.vfs.createmode = self.store.createmode
276 self._applyrequirements(requirements)
276 self._applyrequirements(requirements)
277 if create:
277 if create:
278 self._writerequirements()
278 self._writerequirements()
279
279
280
280
281 self._branchcaches = {}
281 self._branchcaches = {}
282 self.filterpats = {}
282 self.filterpats = {}
283 self._datafilters = {}
283 self._datafilters = {}
284 self._transref = self._lockref = self._wlockref = None
284 self._transref = self._lockref = self._wlockref = None
285
285
286 # A cache for various files under .hg/ that tracks file changes,
286 # A cache for various files under .hg/ that tracks file changes,
287 # (used by the filecache decorator)
287 # (used by the filecache decorator)
288 #
288 #
289 # Maps a property name to its util.filecacheentry
289 # Maps a property name to its util.filecacheentry
290 self._filecache = {}
290 self._filecache = {}
291
291
292 # hold sets of revision to be filtered
292 # hold sets of revision to be filtered
293 # should be cleared when something might have changed the filter value:
293 # should be cleared when something might have changed the filter value:
294 # - new changesets,
294 # - new changesets,
295 # - phase change,
295 # - phase change,
296 # - new obsolescence marker,
296 # - new obsolescence marker,
297 # - working directory parent change,
297 # - working directory parent change,
298 # - bookmark changes
298 # - bookmark changes
299 self.filteredrevcache = {}
299 self.filteredrevcache = {}
300
300
301 # generic mapping between names and nodes
301 # generic mapping between names and nodes
302 self.names = namespaces.namespaces()
302 self.names = namespaces.namespaces()
303
303
304 def close(self):
304 def close(self):
305 pass
305 pass
306
306
307 def _restrictcapabilities(self, caps):
307 def _restrictcapabilities(self, caps):
308 # bundle2 is not ready for prime time, drop it unless explicitly
308 # bundle2 is not ready for prime time, drop it unless explicitly
309 # required by the tests (or some brave tester)
309 # required by the tests (or some brave tester)
310 if self.ui.configbool('experimental', 'bundle2-exp', False):
310 if self.ui.configbool('experimental', 'bundle2-exp', False):
311 caps = set(caps)
311 caps = set(caps)
312 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
312 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
313 caps.add('bundle2-exp=' + urllib.quote(capsblob))
313 caps.add('bundle2-exp=' + urllib.quote(capsblob))
314 return caps
314 return caps
315
315
316 def _applyrequirements(self, requirements):
316 def _applyrequirements(self, requirements):
317 self.requirements = requirements
317 self.requirements = requirements
318 self.svfs.options = dict((r, 1) for r in requirements
318 self.svfs.options = dict((r, 1) for r in requirements
319 if r in self.openerreqs)
319 if r in self.openerreqs)
320 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
320 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
321 if chunkcachesize is not None:
321 if chunkcachesize is not None:
322 self.svfs.options['chunkcachesize'] = chunkcachesize
322 self.svfs.options['chunkcachesize'] = chunkcachesize
323 maxchainlen = self.ui.configint('format', 'maxchainlen')
323 maxchainlen = self.ui.configint('format', 'maxchainlen')
324 if maxchainlen is not None:
324 if maxchainlen is not None:
325 self.svfs.options['maxchainlen'] = maxchainlen
325 self.svfs.options['maxchainlen'] = maxchainlen
326 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
326 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
327 if manifestcachesize is not None:
327 if manifestcachesize is not None:
328 self.svfs.options['manifestcachesize'] = manifestcachesize
328 self.svfs.options['manifestcachesize'] = manifestcachesize
329
329
330 def _writerequirements(self):
330 def _writerequirements(self):
331 reqfile = self.vfs("requires", "w")
331 reqfile = self.vfs("requires", "w")
332 for r in sorted(self.requirements):
332 for r in sorted(self.requirements):
333 reqfile.write("%s\n" % r)
333 reqfile.write("%s\n" % r)
334 reqfile.close()
334 reqfile.close()
335
335
336 def _checknested(self, path):
336 def _checknested(self, path):
337 """Determine if path is a legal nested repository."""
337 """Determine if path is a legal nested repository."""
338 if not path.startswith(self.root):
338 if not path.startswith(self.root):
339 return False
339 return False
340 subpath = path[len(self.root) + 1:]
340 subpath = path[len(self.root) + 1:]
341 normsubpath = util.pconvert(subpath)
341 normsubpath = util.pconvert(subpath)
342
342
343 # XXX: Checking against the current working copy is wrong in
343 # XXX: Checking against the current working copy is wrong in
344 # the sense that it can reject things like
344 # the sense that it can reject things like
345 #
345 #
346 # $ hg cat -r 10 sub/x.txt
346 # $ hg cat -r 10 sub/x.txt
347 #
347 #
348 # if sub/ is no longer a subrepository in the working copy
348 # if sub/ is no longer a subrepository in the working copy
349 # parent revision.
349 # parent revision.
350 #
350 #
351 # However, it can of course also allow things that would have
351 # However, it can of course also allow things that would have
352 # been rejected before, such as the above cat command if sub/
352 # been rejected before, such as the above cat command if sub/
353 # is a subrepository now, but was a normal directory before.
353 # is a subrepository now, but was a normal directory before.
354 # The old path auditor would have rejected by mistake since it
354 # The old path auditor would have rejected by mistake since it
355 # panics when it sees sub/.hg/.
355 # panics when it sees sub/.hg/.
356 #
356 #
357 # All in all, checking against the working copy seems sensible
357 # All in all, checking against the working copy seems sensible
358 # since we want to prevent access to nested repositories on
358 # since we want to prevent access to nested repositories on
359 # the filesystem *now*.
359 # the filesystem *now*.
360 ctx = self[None]
360 ctx = self[None]
361 parts = util.splitpath(subpath)
361 parts = util.splitpath(subpath)
362 while parts:
362 while parts:
363 prefix = '/'.join(parts)
363 prefix = '/'.join(parts)
364 if prefix in ctx.substate:
364 if prefix in ctx.substate:
365 if prefix == normsubpath:
365 if prefix == normsubpath:
366 return True
366 return True
367 else:
367 else:
368 sub = ctx.sub(prefix)
368 sub = ctx.sub(prefix)
369 return sub.checknested(subpath[len(prefix) + 1:])
369 return sub.checknested(subpath[len(prefix) + 1:])
370 else:
370 else:
371 parts.pop()
371 parts.pop()
372 return False
372 return False
373
373
374 def peer(self):
374 def peer(self):
375 return localpeer(self) # not cached to avoid reference cycle
375 return localpeer(self) # not cached to avoid reference cycle
376
376
377 def unfiltered(self):
377 def unfiltered(self):
378 """Return unfiltered version of the repository
378 """Return unfiltered version of the repository
379
379
380 Intended to be overwritten by filtered repo."""
380 Intended to be overwritten by filtered repo."""
381 return self
381 return self
382
382
383 def filtered(self, name):
383 def filtered(self, name):
384 """Return a filtered version of a repository"""
384 """Return a filtered version of a repository"""
385 # build a new class with the mixin and the current class
385 # build a new class with the mixin and the current class
386 # (possibly subclass of the repo)
386 # (possibly subclass of the repo)
387 class proxycls(repoview.repoview, self.unfiltered().__class__):
387 class proxycls(repoview.repoview, self.unfiltered().__class__):
388 pass
388 pass
389 return proxycls(self, name)
389 return proxycls(self, name)
390
390
391 @repofilecache('bookmarks')
391 @repofilecache('bookmarks')
392 def _bookmarks(self):
392 def _bookmarks(self):
393 return bookmarks.bmstore(self)
393 return bookmarks.bmstore(self)
394
394
395 @repofilecache('bookmarks.current')
395 @repofilecache('bookmarks.current')
396 def _bookmarkcurrent(self):
396 def _bookmarkcurrent(self):
397 return bookmarks.readcurrent(self)
397 return bookmarks.readcurrent(self)
398
398
399 def bookmarkheads(self, bookmark):
399 def bookmarkheads(self, bookmark):
400 name = bookmark.split('@', 1)[0]
400 name = bookmark.split('@', 1)[0]
401 heads = []
401 heads = []
402 for mark, n in self._bookmarks.iteritems():
402 for mark, n in self._bookmarks.iteritems():
403 if mark.split('@', 1)[0] == name:
403 if mark.split('@', 1)[0] == name:
404 heads.append(n)
404 heads.append(n)
405 return heads
405 return heads
406
406
407 @storecache('phaseroots')
407 @storecache('phaseroots')
408 def _phasecache(self):
408 def _phasecache(self):
409 return phases.phasecache(self, self._phasedefaults)
409 return phases.phasecache(self, self._phasedefaults)
410
410
411 @storecache('obsstore')
411 @storecache('obsstore')
412 def obsstore(self):
412 def obsstore(self):
413 # read default format for new obsstore.
413 # read default format for new obsstore.
414 defaultformat = self.ui.configint('format', 'obsstore-version', None)
414 defaultformat = self.ui.configint('format', 'obsstore-version', None)
415 # rely on obsstore class default when possible.
415 # rely on obsstore class default when possible.
416 kwargs = {}
416 kwargs = {}
417 if defaultformat is not None:
417 if defaultformat is not None:
418 kwargs['defaultformat'] = defaultformat
418 kwargs['defaultformat'] = defaultformat
419 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
419 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
420 store = obsolete.obsstore(self.svfs, readonly=readonly,
420 store = obsolete.obsstore(self.svfs, readonly=readonly,
421 **kwargs)
421 **kwargs)
422 if store and readonly:
422 if store and readonly:
423 # message is rare enough to not be translated
423 # message is rare enough to not be translated
424 msg = 'obsolete feature not enabled but %i markers found!\n'
424 msg = 'obsolete feature not enabled but %i markers found!\n'
425 self.ui.warn(msg % len(list(store)))
425 self.ui.warn(msg % len(list(store)))
426 return store
426 return store
427
427
428 @storecache('00changelog.i')
428 @storecache('00changelog.i')
429 def changelog(self):
429 def changelog(self):
430 c = changelog.changelog(self.svfs)
430 c = changelog.changelog(self.svfs)
431 if 'HG_PENDING' in os.environ:
431 if 'HG_PENDING' in os.environ:
432 p = os.environ['HG_PENDING']
432 p = os.environ['HG_PENDING']
433 if p.startswith(self.root):
433 if p.startswith(self.root):
434 c.readpending('00changelog.i.a')
434 c.readpending('00changelog.i.a')
435 return c
435 return c
436
436
437 @storecache('00manifest.i')
437 @storecache('00manifest.i')
438 def manifest(self):
438 def manifest(self):
439 return manifest.manifest(self.svfs)
439 return manifest.manifest(self.svfs)
440
440
441 @repofilecache('dirstate')
441 @repofilecache('dirstate')
442 def dirstate(self):
442 def dirstate(self):
443 warned = [0]
443 warned = [0]
444 def validate(node):
444 def validate(node):
445 try:
445 try:
446 self.changelog.rev(node)
446 self.changelog.rev(node)
447 return node
447 return node
448 except error.LookupError:
448 except error.LookupError:
449 if not warned[0]:
449 if not warned[0]:
450 warned[0] = True
450 warned[0] = True
451 self.ui.warn(_("warning: ignoring unknown"
451 self.ui.warn(_("warning: ignoring unknown"
452 " working parent %s!\n") % short(node))
452 " working parent %s!\n") % short(node))
453 return nullid
453 return nullid
454
454
455 return dirstate.dirstate(self.vfs, self.ui, self.root, validate)
455 return dirstate.dirstate(self.vfs, self.ui, self.root, validate)
456
456
457 def __getitem__(self, changeid):
457 def __getitem__(self, changeid):
458 if changeid is None:
458 if changeid is None:
459 return context.workingctx(self)
459 return context.workingctx(self)
460 if isinstance(changeid, slice):
460 if isinstance(changeid, slice):
461 return [context.changectx(self, i)
461 return [context.changectx(self, i)
462 for i in xrange(*changeid.indices(len(self)))
462 for i in xrange(*changeid.indices(len(self)))
463 if i not in self.changelog.filteredrevs]
463 if i not in self.changelog.filteredrevs]
464 return context.changectx(self, changeid)
464 return context.changectx(self, changeid)
465
465
466 def __contains__(self, changeid):
466 def __contains__(self, changeid):
467 try:
467 try:
468 return bool(self.lookup(changeid))
468 return bool(self.lookup(changeid))
469 except error.RepoLookupError:
469 except error.RepoLookupError:
470 return False
470 return False
471
471
472 def __nonzero__(self):
472 def __nonzero__(self):
473 return True
473 return True
474
474
475 def __len__(self):
475 def __len__(self):
476 return len(self.changelog)
476 return len(self.changelog)
477
477
478 def __iter__(self):
478 def __iter__(self):
479 return iter(self.changelog)
479 return iter(self.changelog)
480
480
def revs(self, expr, *args):
    '''Return a list of revisions matching the given revset'''
    spec = revset.formatspec(expr, *args)
    matcher = revset.match(None, spec)
    return matcher(self)
486
486
def set(self, expr, *args):
    '''
    Yield a context for each matching revision, after doing arg
    replacement via revset.formatspec
    '''
    for rev in self.revs(expr, *args):
        yield self[rev]
494
494
def url(self):
    """Return a file: URL naming this repository's root."""
    return 'file:' + self.root
497
497
def hook(self, name, throw=False, **args):
    """Call a hook, passing this repo instance.

    Convenience wrapper around hook.hook(); extensions rarely call
    this unless they register a custom hook or replace code that is
    expected to fire one.
    """
    return hook.hook(self.ui, self, name, throw, **args)
506
506
507 @unfilteredmethod
507 def _tag(self, names, node, message, local, user, date, extra={},
508 def _tag(self, names, node, message, local, user, date, extra={},
508 editor=False):
509 editor=False):
509 if isinstance(names, str):
510 if isinstance(names, str):
510 names = (names,)
511 names = (names,)
511
512
512 branches = self.branchmap()
513 branches = self.branchmap()
513 for name in names:
514 for name in names:
514 self.hook('pretag', throw=True, node=hex(node), tag=name,
515 self.hook('pretag', throw=True, node=hex(node), tag=name,
515 local=local)
516 local=local)
516 if name in branches:
517 if name in branches:
517 self.ui.warn(_("warning: tag %s conflicts with existing"
518 self.ui.warn(_("warning: tag %s conflicts with existing"
518 " branch name\n") % name)
519 " branch name\n") % name)
519
520
520 def writetags(fp, names, munge, prevtags):
521 def writetags(fp, names, munge, prevtags):
521 fp.seek(0, 2)
522 fp.seek(0, 2)
522 if prevtags and prevtags[-1] != '\n':
523 if prevtags and prevtags[-1] != '\n':
523 fp.write('\n')
524 fp.write('\n')
524 for name in names:
525 for name in names:
525 m = munge and munge(name) or name
526 m = munge and munge(name) or name
526 if (self._tagscache.tagtypes and
527 if (self._tagscache.tagtypes and
527 name in self._tagscache.tagtypes):
528 name in self._tagscache.tagtypes):
528 old = self.tags().get(name, nullid)
529 old = self.tags().get(name, nullid)
529 fp.write('%s %s\n' % (hex(old), m))
530 fp.write('%s %s\n' % (hex(old), m))
530 fp.write('%s %s\n' % (hex(node), m))
531 fp.write('%s %s\n' % (hex(node), m))
531 fp.close()
532 fp.close()
532
533
533 prevtags = ''
534 prevtags = ''
534 if local:
535 if local:
535 try:
536 try:
536 fp = self.vfs('localtags', 'r+')
537 fp = self.vfs('localtags', 'r+')
537 except IOError:
538 except IOError:
538 fp = self.vfs('localtags', 'a')
539 fp = self.vfs('localtags', 'a')
539 else:
540 else:
540 prevtags = fp.read()
541 prevtags = fp.read()
541
542
542 # local tags are stored in the current charset
543 # local tags are stored in the current charset
543 writetags(fp, names, None, prevtags)
544 writetags(fp, names, None, prevtags)
544 for name in names:
545 for name in names:
545 self.hook('tag', node=hex(node), tag=name, local=local)
546 self.hook('tag', node=hex(node), tag=name, local=local)
546 return
547 return
547
548
548 try:
549 try:
549 fp = self.wfile('.hgtags', 'rb+')
550 fp = self.wfile('.hgtags', 'rb+')
550 except IOError, e:
551 except IOError, e:
551 if e.errno != errno.ENOENT:
552 if e.errno != errno.ENOENT:
552 raise
553 raise
553 fp = self.wfile('.hgtags', 'ab')
554 fp = self.wfile('.hgtags', 'ab')
554 else:
555 else:
555 prevtags = fp.read()
556 prevtags = fp.read()
556
557
557 # committed tags are stored in UTF-8
558 # committed tags are stored in UTF-8
558 writetags(fp, names, encoding.fromlocal, prevtags)
559 writetags(fp, names, encoding.fromlocal, prevtags)
559
560
560 fp.close()
561 fp.close()
561
562
562 self.invalidatecaches()
563 self.invalidatecaches()
563
564
564 if '.hgtags' not in self.dirstate:
565 if '.hgtags' not in self.dirstate:
565 self[None].add(['.hgtags'])
566 self[None].add(['.hgtags'])
566
567
567 m = matchmod.exact(self.root, '', ['.hgtags'])
568 m = matchmod.exact(self.root, '', ['.hgtags'])
568 tagnode = self.commit(message, user, date, extra=extra, match=m,
569 tagnode = self.commit(message, user, date, extra=extra, match=m,
569 editor=editor)
570 editor=editor)
570
571
571 for name in names:
572 for name in names:
572 self.hook('tag', node=hex(node), tag=name, local=local)
573 self.hook('tag', node=hex(node), tag=name, local=local)
573
574
574 return tagnode
575 return tagnode
575
576
def tag(self, names, node, message, local, user, date, editor=False):
    '''tag a revision with one or more symbolic names.

    names is a list of strings or, when adding a single tag, names may be a
    string.

    if local is True, the tags are stored in a per-repository file.
    otherwise, they are stored in the .hgtags file, and a new
    changeset is committed with the change.

    keyword arguments:

    local: whether to store tags in non-version-controlled file
    (default False)

    message: commit message to use if committing

    user: name of user to use if committing

    date: date tuple to use if committing'''

    if not local:
        # refuse to commit a tag over a locally modified .hgtags
        m = matchmod.exact(self.root, '', ['.hgtags'])
        dirty = util.any(self.status(match=m, unknown=True, ignored=True))
        if dirty:
            raise util.Abort(_('working copy of .hgtags is changed'),
                             hint=_('please commit .hgtags manually'))

    self.tags() # instantiate the cache
    self._tag(names, node, message, local, user, date, editor=editor)
605
606
@filteredpropertycache
def _tagscache(self):
    '''Returns a tagscache object that contains various tags related
    caches.'''

    # Centralizing the caches behind one decorated property keeps
    # cache management simple: everything else just reads fields off
    # the returned object.
    class tagscache(object):
        def __init__(self):
            # tags maps tag name to node; tagtypes maps tag name to
            # 'global' or 'local'.  (Global tags are defined by
            # .hgtags across all heads, local tags live in
            # .hg/localtags.)  Together they are the in-memory tag
            # cache.
            self.tags = self.tagtypes = None
            self.nodetagscache = self.tagslist = None

    cache = tagscache()
    cache.tags, cache.tagtypes = self._findtags()
    return cache
628
629
def tags(self):
    '''return a mapping of tag to node'''
    result = {}
    if self.changelog.filteredrevs:
        # a filtered view cannot trust the unfiltered cache
        tags, tt = self._findtags()
    else:
        tags = self._tagscache.tags
    for name, node in tags.iteritems():
        try:
            # ignore tags to unknown nodes
            self.changelog.rev(node)
            result[name] = node
        except (error.LookupError, ValueError):
            pass
    return result
644
645
def _findtags(self):
    '''Do the hard work of finding tags.  Return a pair of dicts
    (tags, tagtypes) where tags maps tag name to node, and tagtypes
    maps tag name to a string like \'global\' or \'local\'.
    Subclasses or extensions are free to add their own tags, but
    should be aware that the returned dicts will be retained for the
    duration of the localrepo object.'''

    # XXX what tagtype should subclasses/extensions use? Currently
    # mq and bookmarks add tags, but do not set the tagtype at all.
    # Should each extension invent its own tag type? Should there
    # be one tagtype for all such "virtual" tags? Or is the status
    # quo fine?

    alltags = {}    # map tag name to (node, hist)
    tagtypes = {}

    tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
    tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

    # Build the return dicts.  Tag names must be re-encoded because
    # the tags module always uses UTF-8 (so no info is lost writing
    # the cache), while the rest of Mercurial wants the local
    # encoding.
    tags = {}
    for name, (node, hist) in alltags.iteritems():
        if node != nullid:
            tags[encoding.tolocal(name)] = node
    tags['tip'] = self.changelog.tip()
    tagtypes = dict([(encoding.tolocal(name), value)
                     for (name, value) in tagtypes.iteritems()])
    return (tags, tagtypes)
677
678
def tagtype(self, tagname):
    '''
    return the type of the given tag. result can be:

    'local'  : a local tag
    'global' : a global tag
    None     : tag does not exist
    '''
    return self._tagscache.tagtypes.get(tagname)
688
689
def tagslist(self):
    '''return a list of tags ordered by revision'''
    cache = self._tagscache
    if not cache.tagslist:
        entries = []
        for name, node in self.tags().iteritems():
            entries.append((self.changelog.rev(node), name, node))
        cache.tagslist = [(name, node)
                          for rev, name, node in sorted(entries)]
    return cache.tagslist
698
699
699 def nodetags(self, node):
700 def nodetags(self, node):
700 '''return the tags associated with a node'''
701 '''return the tags associated with a node'''
701 if not self._tagscache.nodetagscache:
702 if not self._tagscache.nodetagscache:
702 nodetagscache = {}
703 nodetagscache = {}
703 for t, n in self._tagscache.tags.iteritems():
704 for t, n in self._tagscache.tags.iteritems():
704 nodetagscache.setdefault(n, []).append(t)
705 nodetagscache.setdefault(n, []).append(t)
705 for tags in nodetagscache.itervalues():
706 for tags in nodetagscache.itervalues():
706 tags.sort()
707 tags.sort()
707 self._tagscache.nodetagscache = nodetagscache
708 self._tagscache.nodetagscache = nodetagscache
708 return self._tagscache.nodetagscache.get(node, [])
709 return self._tagscache.nodetagscache.get(node, [])
709
710
def nodebookmarks(self, node):
    """Return the sorted list of bookmarks pointing at node."""
    return sorted(mark for mark, n in self._bookmarks.iteritems()
                  if n == node)
716
717
def branchmap(self):
    '''returns a dictionary {branch: [branchheads]} with branchheads
    ordered by increasing revision number'''
    # refresh the per-filter cache, then hand out our filter's view
    branchmap.updatecache(self)
    return self._branchcaches[self.filtername]
722
723
def branchtip(self, branch, ignoremissing=False):
    '''return the tip node for a given branch

    If ignoremissing is True, then this method will not raise an error.
    This is helpful for callers that only expect None for a missing branch
    (e.g. namespace).

    '''
    try:
        return self.branchmap().branchtip(branch)
    except KeyError:
        if not ignoremissing:
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
        # missing branch tolerated: fall through and return None
        # (the original carried a dead `else: pass` branch here)
738
739
def lookup(self, key):
    """Resolve key to its changeset node."""
    return self[key].node()
741
742
def lookupbranch(self, key, remote=None):
    """Interpret key as a branch name when possible, otherwise return
    the branch of the revision it names (consulting remote if given)."""
    target = remote or self
    if key in target.branchmap():
        return key

    target = (remote and remote.local()) and remote or self
    return target[key].branch()
749
750
def known(self, nodes):
    """For each node, report whether it is known here and not secret."""
    nodemap = self.changelog.nodemap
    phasecache = self._phasecache
    result = []
    for node in nodes:
        rev = nodemap.get(node)
        # known means: present in the changelog AND below secret phase
        result.append(rev is not None
                      and phasecache.phase(self, rev) < phases.secret)
    return result
759
760
def local(self):
    """Return self, marking this repository as local."""
    return self
762
763
def cancopy(self):
    """Report whether this repository may be copied."""
    # so statichttprepo's override of local() works
    if not self.local():
        return False
    # non-publishing repositories can always be copied
    if not self.ui.configbool('phases', 'publish', True):
        return True
    # if publishing we can't copy if there is filtered content
    return not self.filtered('visible').changelog.filteredrevs
771
772
def shared(self):
    '''the type of shared repository (None if not shared)'''
    return 'store' if self.sharedpath != self.path else None
777
778
def join(self, f, *insidef):
    """Join f (and optional components) and resolve via the .hg vfs."""
    return self.vfs.join(os.path.join(f, *insidef))
780
781
def wjoin(self, f, *insidef):
    """Join path components relative to the working directory root."""
    return self.vfs.reljoin(self.root, f, *insidef)
783
784
def file(self, f):
    """Return the filelog for tracked file f (leading '/' stripped)."""
    # deliberately index f[0] (not startswith) so an empty name still
    # raises, exactly as before
    if f[0] == '/':
        f = f[1:]
    return filelog.filelog(self.svfs, f)
788
789
def changectx(self, changeid):
    """Shorthand for self[changeid]."""
    return self[changeid]
791
792
def parents(self, changeid=None):
    '''get list of changectxs for parents of changeid'''
    return self[changeid].parents()
795
796
def setparents(self, p1, p2=nullid):
    """Set the dirstate parents, fixing up copy records as needed."""
    self.dirstate.beginparentchange()
    copies = self.dirstate.setparents(p1, p2)
    pctx = self[p1]
    if copies:
        # The dirstate cannot adjust copy records itself: that
        # requires access to the parents' manifests.  Preserve copy
        # info only for entries added relative to the first parent.
        for fname in copies:
            if fname not in pctx and copies[fname] in pctx:
                self.dirstate.copy(copies[fname], fname)
    if p2 == nullid:
        for fname, src in sorted(self.dirstate.copies().items()):
            if fname not in pctx and src not in pctx:
                self.dirstate.copy(None, fname)
    self.dirstate.endparentchange()
812
813
def filectx(self, path, changeid=None, fileid=None):
    """changeid can be a changeset revision, node, or tag.
    fileid can be a file revision or node."""
    return context.filectx(self, path, changeid, fileid)
817
818
def getcwd(self):
    """Return the current working directory as the dirstate sees it."""
    return self.dirstate.getcwd()
820
821
def pathto(self, f, cwd=None):
    """Return f as a path relative to cwd, per dirstate rules."""
    return self.dirstate.pathto(f, cwd)
823
824
def wfile(self, f, mode='r'):
    """Open working-directory file f with the given mode."""
    return self.wvfs(f, mode)
826
827
def _link(self, f):
    """True if working-directory file f is a symlink."""
    return self.wvfs.islink(f)
829
830
def _loadfilter(self, filter):
    """Load (and cache) the filter chain for the given config section
    ('encode' or 'decode').

    Returns a list of (matcher, filterfn, params) triples.
    """
    if filter not in self.filterpats:
        pats = []
        for pat, cmd in self.ui.configitems(filter):
            if cmd == '!':
                continue
            mf = matchmod.match(self.root, '', [pat])
            fn = None
            params = cmd
            for name, filterfn in self._datafilters.iteritems():
                if cmd.startswith(name):
                    fn = filterfn
                    params = cmd[len(name):].lstrip()
                    break
            if not fn:
                fn = lambda s, c, **kwargs: util.filter(s, c)
            # Wrap old filters not supporting keyword arguments.
            # The wrapped function must be bound via a default
            # argument: a plain closure over a loop-local would make
            # every wrapper call the *last* filter loaded
            # (late-binding closure bug).
            if not inspect.getargspec(fn)[2]:
                fn = lambda s, c, _oldfn=fn, **kwargs: _oldfn(s, c)
            pats.append((mf, fn, params))
        self.filterpats[filter] = pats
    return self.filterpats[filter]
853
854
def _filter(self, filterpats, filename, data):
    """Run data through the first filter whose pattern matches
    filename; unmatched data passes through unchanged."""
    for mfilter, fn, cmd in filterpats:
        if mfilter(filename):
            self.ui.debug("filtering %s through %s\n" % (filename, cmd))
            data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
            break
    return data
862
863
@unfilteredpropertycache
def _encodefilterpats(self):
    # cached filter chain from the [encode] config section
    return self._loadfilter('encode')
866
867
@unfilteredpropertycache
def _decodefilterpats(self):
    # cached filter chain from the [decode] config section
    return self._loadfilter('decode')
870
871
def adddatafilter(self, name, filter):
    """Register a named data filter usable from encode/decode configs."""
    self._datafilters[name] = filter
873
874
def wread(self, filename):
    """Read filename from the working dir (following a symlink to its
    target text) and apply the encode filters."""
    if self._link(filename):
        data = self.wvfs.readlink(filename)
    else:
        data = self.wvfs.read(filename)
    return self._filter(self._encodefilterpats, filename, data)
880
881
def wwrite(self, filename, data, flags):
    """Write data to filename in the working dir after decode
    filtering, honouring flags ('l' symlink, 'x' executable)."""
    data = self._filter(self._decodefilterpats, filename, data)
    if 'l' in flags:
        self.wvfs.symlink(data, filename)
    else:
        self.wvfs.write(filename, data)
        if 'x' in flags:
            self.wvfs.setflags(filename, False, True)
889
890
def wwritedata(self, filename, data):
    """Return data as it would be written (decode filters applied)."""
    return self._filter(self._decodefilterpats, filename, data)
892
893
def currenttransaction(self):
    """return the current transaction or None if non exists"""
    tr = None
    if self._transref:
        # dereference the weakref; may yield None if collected
        tr = self._transref()
    if tr and tr.running():
        return tr
    return None
899
900
def transaction(self, desc, report=None):
    """Open a new transaction, or nest into the one already running."""
    tr = self.currenttransaction()
    if tr is not None:
        return tr.nest()

    # refuse to start on top of an abandoned journal
    if self.svfs.exists("journal"):
        raise error.RepoError(
            _("abandoned transaction found"),
            hint=_("run 'hg recover' to clean up transaction"))

    self._writejournal(desc)
    renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
    rp = report or self.ui.warn
    vfsmap = {'plain': self.vfs} # root of .hg/
    tr = transaction.transaction(rp, self.svfs, vfsmap,
                                 "journal",
                                 "undo",
                                 aftertrans(renames),
                                 self.store.createmode)
    # note: writing the fncache only during finalize mean that the file is
    # outdated when running hooks. As fncache is used for streaming clone,
    # this is not expected to break anything that happen during the hooks.
    tr.addfinalize('flush-fncache', self.store.write)
    self._transref = weakref.ref(tr)
    return tr
926
927
def _journalfiles(self):
    # (vfs, name) pairs for every file the journal snapshot covers
    return ((self.svfs, 'journal'),
            (self.vfs, 'journal.dirstate'),
            (self.vfs, 'journal.branch'),
            (self.vfs, 'journal.desc'),
            (self.vfs, 'journal.bookmarks'),
            (self.svfs, 'journal.phaseroots'))
934
935
def undofiles(self):
    """Return (vfs, name) pairs naming the undo files."""
    return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
937
938
def _writejournal(self, desc):
    """Snapshot state files into journal.* before starting a
    transaction so they can be restored on rollback."""
    self.vfs.write("journal.dirstate",
                   self.vfs.tryread("dirstate"))
    self.vfs.write("journal.branch",
                   encoding.fromlocal(self.dirstate.branch()))
    self.vfs.write("journal.desc",
                   "%d\n%s\n" % (len(self), desc))
    self.vfs.write("journal.bookmarks",
                   self.vfs.tryread("bookmarks"))
    self.svfs.write("journal.phaseroots",
                    self.svfs.tryread("phaseroots"))
949
950
def recover(self):
    """Roll back an interrupted transaction.

    Returns True if a journal was found and rolled back, else False.
    """
    lock = self.lock()
    try:
        if not self.svfs.exists("journal"):
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
        self.ui.status(_("rolling back interrupted transaction\n"))
        vfsmap = {'': self.svfs,
                  'plain': self.vfs,}
        transaction.rollback(self.svfs, vfsmap, "journal",
                             self.ui.warn)
        self.invalidate()
        return True
    finally:
        lock.release()
966
967
def rollback(self, dryrun=False, force=False):
    """Undo the most recent transaction.

    Returns 1 when there is nothing to roll back, otherwise whatever
    _rollback returns.
    """
    wlock = lock = None
    try:
        wlock = self.wlock()
        lock = self.lock()
        if not self.svfs.exists("undo"):
            self.ui.warn(_("no rollback information available\n"))
            return 1
        return self._rollback(dryrun, force)
    finally:
        release(lock, wlock)
979
980
    @unfilteredmethod # Until we get smarter cache management
    def _rollback(self, dryrun, force):
        """Undo the last transaction recorded by the undo.* files.

        dryrun: only report what would be rolled back, touch nothing.
        force: proceed even when rolling back the last commit while the
        working directory parent is not tip (data may be lost).

        Returns 0 on success (including dry runs).
        """
        ui = self.ui
        try:
            # undo.desc layout: "<old repo length>\n<command>\n[<detail>]"
            args = self.vfs.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            # no undo.desc: transaction predates it or was not recorded
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        # capture dirstate parents before the changelog is truncated
        parents = self.dirstate.parents()
        self.destroying()
        vfsmap = {'plain': self.vfs, '': self.svfs}
        transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
        if self.vfs.exists('undo.bookmarks'):
            self.vfs.rename('undo.bookmarks', 'bookmarks')
        if self.svfs.exists('undo.phaseroots'):
            self.svfs.rename('undo.phaseroots', 'phaseroots')
        self.invalidate()

        # only restore the saved dirstate if a working-directory parent
        # was actually stripped by the rollback
        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            self.vfs.rename('undo.dirstate', 'dirstate')
            try:
                branch = self.vfs.read('undo.branch')
                self.dirstate.setbranch(encoding.tolocal(branch))
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0
1046
1047
1047 def invalidatecaches(self):
1048 def invalidatecaches(self):
1048
1049
1049 if '_tagscache' in vars(self):
1050 if '_tagscache' in vars(self):
1050 # can't use delattr on proxy
1051 # can't use delattr on proxy
1051 del self.__dict__['_tagscache']
1052 del self.__dict__['_tagscache']
1052
1053
1053 self.unfiltered()._branchcaches.clear()
1054 self.unfiltered()._branchcaches.clear()
1054 self.invalidatevolatilesets()
1055 self.invalidatevolatilesets()
1055
1056
1056 def invalidatevolatilesets(self):
1057 def invalidatevolatilesets(self):
1057 self.filteredrevcache.clear()
1058 self.filteredrevcache.clear()
1058 obsolete.clearobscaches(self)
1059 obsolete.clearobscaches(self)
1059
1060
1060 def invalidatedirstate(self):
1061 def invalidatedirstate(self):
1061 '''Invalidates the dirstate, causing the next call to dirstate
1062 '''Invalidates the dirstate, causing the next call to dirstate
1062 to check if it was modified since the last time it was read,
1063 to check if it was modified since the last time it was read,
1063 rereading it if it has.
1064 rereading it if it has.
1064
1065
1065 This is different to dirstate.invalidate() that it doesn't always
1066 This is different to dirstate.invalidate() that it doesn't always
1066 rereads the dirstate. Use dirstate.invalidate() if you want to
1067 rereads the dirstate. Use dirstate.invalidate() if you want to
1067 explicitly read the dirstate again (i.e. restoring it to a previous
1068 explicitly read the dirstate again (i.e. restoring it to a previous
1068 known good state).'''
1069 known good state).'''
1069 if hasunfilteredcache(self, 'dirstate'):
1070 if hasunfilteredcache(self, 'dirstate'):
1070 for k in self.dirstate._filecache:
1071 for k in self.dirstate._filecache:
1071 try:
1072 try:
1072 delattr(self.dirstate, k)
1073 delattr(self.dirstate, k)
1073 except AttributeError:
1074 except AttributeError:
1074 pass
1075 pass
1075 delattr(self.unfiltered(), 'dirstate')
1076 delattr(self.unfiltered(), 'dirstate')
1076
1077
1077 def invalidate(self):
1078 def invalidate(self):
1078 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1079 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1079 for k in self._filecache:
1080 for k in self._filecache:
1080 # dirstate is invalidated separately in invalidatedirstate()
1081 # dirstate is invalidated separately in invalidatedirstate()
1081 if k == 'dirstate':
1082 if k == 'dirstate':
1082 continue
1083 continue
1083
1084
1084 try:
1085 try:
1085 delattr(unfiltered, k)
1086 delattr(unfiltered, k)
1086 except AttributeError:
1087 except AttributeError:
1087 pass
1088 pass
1088 self.invalidatecaches()
1089 self.invalidatecaches()
1089 self.store.invalidatecaches()
1090 self.store.invalidatecaches()
1090
1091
1091 def invalidateall(self):
1092 def invalidateall(self):
1092 '''Fully invalidates both store and non-store parts, causing the
1093 '''Fully invalidates both store and non-store parts, causing the
1093 subsequent operation to reread any outside changes.'''
1094 subsequent operation to reread any outside changes.'''
1094 # extension should hook this to invalidate its caches
1095 # extension should hook this to invalidate its caches
1095 self.invalidate()
1096 self.invalidate()
1096 self.invalidatedirstate()
1097 self.invalidatedirstate()
1097
1098
1098 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1099 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1099 try:
1100 try:
1100 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1101 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1101 except error.LockHeld, inst:
1102 except error.LockHeld, inst:
1102 if not wait:
1103 if not wait:
1103 raise
1104 raise
1104 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1105 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1105 (desc, inst.locker))
1106 (desc, inst.locker))
1106 # default to 600 seconds timeout
1107 # default to 600 seconds timeout
1107 l = lockmod.lock(vfs, lockname,
1108 l = lockmod.lock(vfs, lockname,
1108 int(self.ui.config("ui", "timeout", "600")),
1109 int(self.ui.config("ui", "timeout", "600")),
1109 releasefn, desc=desc)
1110 releasefn, desc=desc)
1110 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1111 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1111 if acquirefn:
1112 if acquirefn:
1112 acquirefn()
1113 acquirefn()
1113 return l
1114 return l
1114
1115
1115 def _afterlock(self, callback):
1116 def _afterlock(self, callback):
1116 """add a callback to the current repository lock.
1117 """add a callback to the current repository lock.
1117
1118
1118 The callback will be executed on lock release."""
1119 The callback will be executed on lock release."""
1119 l = self._lockref and self._lockref()
1120 l = self._lockref and self._lockref()
1120 if l:
1121 if l:
1121 l.postrelease.append(callback)
1122 l.postrelease.append(callback)
1122 else:
1123 else:
1123 callback()
1124 callback()
1124
1125
1125 def lock(self, wait=True):
1126 def lock(self, wait=True):
1126 '''Lock the repository store (.hg/store) and return a weak reference
1127 '''Lock the repository store (.hg/store) and return a weak reference
1127 to the lock. Use this before modifying the store (e.g. committing or
1128 to the lock. Use this before modifying the store (e.g. committing or
1128 stripping). If you are opening a transaction, get a lock as well.)'''
1129 stripping). If you are opening a transaction, get a lock as well.)'''
1129 l = self._lockref and self._lockref()
1130 l = self._lockref and self._lockref()
1130 if l is not None and l.held:
1131 if l is not None and l.held:
1131 l.lock()
1132 l.lock()
1132 return l
1133 return l
1133
1134
1134 def unlock():
1135 def unlock():
1135 for k, ce in self._filecache.items():
1136 for k, ce in self._filecache.items():
1136 if k == 'dirstate' or k not in self.__dict__:
1137 if k == 'dirstate' or k not in self.__dict__:
1137 continue
1138 continue
1138 ce.refresh()
1139 ce.refresh()
1139
1140
1140 l = self._lock(self.svfs, "lock", wait, unlock,
1141 l = self._lock(self.svfs, "lock", wait, unlock,
1141 self.invalidate, _('repository %s') % self.origroot)
1142 self.invalidate, _('repository %s') % self.origroot)
1142 self._lockref = weakref.ref(l)
1143 self._lockref = weakref.ref(l)
1143 return l
1144 return l
1144
1145
1145 def wlock(self, wait=True):
1146 def wlock(self, wait=True):
1146 '''Lock the non-store parts of the repository (everything under
1147 '''Lock the non-store parts of the repository (everything under
1147 .hg except .hg/store) and return a weak reference to the lock.
1148 .hg except .hg/store) and return a weak reference to the lock.
1148 Use this before modifying files in .hg.'''
1149 Use this before modifying files in .hg.'''
1149 l = self._wlockref and self._wlockref()
1150 l = self._wlockref and self._wlockref()
1150 if l is not None and l.held:
1151 if l is not None and l.held:
1151 l.lock()
1152 l.lock()
1152 return l
1153 return l
1153
1154
1154 def unlock():
1155 def unlock():
1155 if self.dirstate.pendingparentchange():
1156 if self.dirstate.pendingparentchange():
1156 self.dirstate.invalidate()
1157 self.dirstate.invalidate()
1157 else:
1158 else:
1158 self.dirstate.write()
1159 self.dirstate.write()
1159
1160
1160 self._filecache['dirstate'].refresh()
1161 self._filecache['dirstate'].refresh()
1161
1162
1162 l = self._lock(self.vfs, "wlock", wait, unlock,
1163 l = self._lock(self.vfs, "wlock", wait, unlock,
1163 self.invalidatedirstate, _('working directory of %s') %
1164 self.invalidatedirstate, _('working directory of %s') %
1164 self.origroot)
1165 self.origroot)
1165 self._wlockref = weakref.ref(l)
1166 self._wlockref = weakref.ref(l)
1166 return l
1167 return l
1167
1168
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        fctx: the file context carrying the new content and rename info
        manifest1, manifest2: manifests of the commit's two parents
        linkrev: changelog revision the new filelog entry will link to
        tr: the running transaction (a weakref proxy)
        changelist: mutated in place; the file name is appended when a
        new filelog revision is (or flags are) recorded

        Returns the new filelog node, or the existing first-parent node
        when nothing needed to be stored.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # Here, we used to search backwards through history to try to find
            # where the file copy came from if the source of a copy was not in
            # the parent directory. However, this doesn't actually make sense to
            # do (what does a copy from something not in your working copy even
            # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
            # the user that copy information was dropped, so if they didn't
            # expect this outcome it can be fixed, but this is the correct
            # behavior in this circumstance.

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent1 == nullid:
            # file is new on this side: adopt the other parent, if any
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
            if fparent1 in fparentancestors:
                fparent1, fparent2 = fparent2, nullid
            elif fparent2 in fparentancestors:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
        # are just the flags changed during merge?
        elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1
1248
1249
    @unfilteredmethod
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the new changeset node, or None when there was nothing
        to commit.
        """
        # NOTE(review): 'extra={}' is a shared mutable default; it is
        # only read here (extra.get), but confirm callees never mutate
        # the dict they receive.

        def fail(f, msg):
            # used as match.bad callback and for explicit-file errors
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = matchmod.always(self.root, '')

        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        # hold the working-directory lock across status gathering and
        # the actual commit
        wlock = self.wlock()
        try:
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if (not force and merge and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            status = self.status(match=match, clean=force)
            if force:
                status.modified.extend(status.clean) # mq may commit clean files

            # check subrepos
            subs = []
            commitsubs = set()
            newstate = wctx.substate.copy()
            # only manage subrepos and .hgsubstate if .hgsub is present
            if '.hgsub' in wctx:
                # we'll decide whether to track this ourselves, thanks
                for c in status.modified, status.added, status.removed:
                    if '.hgsubstate' in c:
                        c.remove('.hgsubstate')

                # compare current state to last committed state
                # build new substate based on last committed state
                oldstate = wctx.p1().substate
                for s in sorted(newstate.keys()):
                    if not match(s):
                        # ignore working copy, use old state if present
                        if s in oldstate:
                            newstate[s] = oldstate[s]
                            continue
                        if not force:
                            raise util.Abort(
                                _("commit with new subrepo %s excluded") % s)
                    if wctx.sub(s).dirty(True):
                        if not self.ui.configbool('ui', 'commitsubrepos'):
                            raise util.Abort(
                                _("uncommitted changes in subrepo %s") % s,
                                hint=_("use --subrepos for recursive commit"))
                        subs.append(s)
                        commitsubs.add(s)
                    else:
                        bs = wctx.sub(s).basestate()
                        newstate[s] = (newstate[s][0], bs, newstate[s][2])
                        if oldstate.get(s, (None, None, None))[1] != bs:
                            subs.append(s)

                # check for removed subrepos
                for p in wctx.parents():
                    r = [s for s in p.substate if s not in newstate]
                    subs += [s for s in r if match(s)]
                if subs:
                    if (not match('.hgsub') and
                        '.hgsub' in (wctx.modified() + wctx.added())):
                        raise util.Abort(
                            _("can't commit subrepos without .hgsub"))
                    status.modified.insert(0, '.hgsubstate')

            elif '.hgsub' in status.removed:
                # clean up .hgsubstate when .hgsub is removed
                if ('.hgsubstate' in wctx and
                    '.hgsubstate' not in (status.modified + status.added +
                                          status.removed)):
                    status.removed.insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(status.modified + status.added + status.removed)

                for f in match.files():
                    f = self.dirstate.normalize(f)
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in status.deleted:
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            # build the to-be-committed context from the gathered status
            cctx = context.workingcommitctx(self, status,
                                            text, user, date, extra)

            # nothing changed and no branch close / branch change: no-op
            if (not force and not extra.get("close") and not merge
                and not cctx.files()
                and wctx.branch() == wctx.p1().branch()):
                return None

            if merge and cctx.deleted():
                raise util.Abort(_("cannot commit merge with missing files"))

            ms = mergemod.mergestate(self)
            for f in status.modified:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_('unresolved merge conflicts '
                                       '(see "hg help resolve")'))

            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook). Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfn = self.savecommitmessage(cctx._text)

            # commit subs and write new state
            if subs:
                for s in sorted(commitsubs):
                    sub = wctx.sub(s)
                    self.ui.status(_('committing subrepository %s\n') %
                                   subrepo.subrelpath(sub))
                    sr = sub.commit(cctx._text, user, date)
                    newstate[s] = (newstate[s][0], sr)
                subrepo.writestate(self, newstate)

            p1, p2 = self.dirstate.parents()
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            try:
                self.hook("precommit", throw=True, parent1=hookp1,
                          parent2=hookp2)
                ret = self.commitctx(cctx, True)
            except: # re-raises
                if edited:
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update bookmarks, dirstate and mergestate
            bookmarks.update(self, [p1, p2], ret)
            cctx.markcommitted(ret)
            ms.reset()
        finally:
            wlock.release()

        def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
            # hack for command that use a temporary commit (eg: histedit)
            # temporary commit got stripped before hook release
            if node in self:
                self.hook("commit", node=node, parent1=parent1,
                          parent2=parent2)
        self._afterlock(commithook)
        return ret
1421
1422
    @unfilteredmethod
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.

        When *error* is true, IOErrors while committing a file are
        always reported; otherwise ENOENT is treated as "file went
        missing" and the file is silently skipped.

        Returns the new changelog node.
        """

        tr = None
        p1, p2 = ctx.p1(), ctx.p2()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction("commit")
            # hand out only a weak proxy so the transaction can be
            # garbage collected once released
            trp = weakref.proxy(tr)

            if ctx.files():
                m1 = p1.manifest()
                m2 = p2.manifest()
                m = m1.copy()

                # check in files
                added = []
                changed = []
                removed = list(ctx.removed())
                # the new filelog entries will link to the next changelog rev
                linkrev = len(self)
                self.ui.note(_("committing files:\n"))
                for f in sorted(ctx.modified() + ctx.added()):
                    self.ui.note(f + "\n")
                    try:
                        fctx = ctx[f]
                        if fctx is None:
                            removed.append(f)
                        else:
                            added.append(f)
                            m[f] = self._filecommit(fctx, m1, m2, linkrev,
                                                    trp, changed)
                            m.setflag(f, fctx.flags())
                    except OSError, inst:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    except IOError, inst:
                        errcode = getattr(inst, 'errno', errno.ENOENT)
                        if error or errcode and errcode != errno.ENOENT:
                            self.ui.warn(_("trouble committing %s!\n") % f)
                            raise

                # update manifest
                self.ui.note(_("committing manifest\n"))
                removed = [f for f in sorted(removed) if f in m1 or f in m2]
                drop = [f for f in removed if f in m]
                for f in drop:
                    del m[f]
                mn = self.manifest.add(m, trp, linkrev,
                                       p1.manifestnode(), p2.manifestnode(),
                                       added, drop)
                files = changed + removed
            else:
                # no file changes: reuse the first parent's manifest
                mn = p1.manifestnode()
                files = []

            # update changelog
            self.ui.note(_("committing changelog\n"))
            self.changelog.delayupdate(tr)
            n = self.changelog.add(mn, files, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            # 'pending' hook arg: repo root while there is pending data
            # to inspect, empty string otherwise
            p = lambda: tr.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            # set the new commit is proper phase
            targetphase = subrepo.newcommitphase(self.ui, ctx)
            if targetphase:
                # retract boundary do not alter parent changeset.
                # if a parent have higher the resulting phase will
                # be compliant anyway
                #
                # if minimal phase was 0 we don't need to retract anything
                phases.retractboundary(self, tr, targetphase, [n])
            tr.close()
            branchmap.updatecache(self.filtered('served'))
            return n
        finally:
            if tr:
                tr.release()
            lock.release()
1508
1509
1509 @unfilteredmethod
1510 @unfilteredmethod
1510 def destroying(self):
1511 def destroying(self):
1511 '''Inform the repository that nodes are about to be destroyed.
1512 '''Inform the repository that nodes are about to be destroyed.
1512 Intended for use by strip and rollback, so there's a common
1513 Intended for use by strip and rollback, so there's a common
1513 place for anything that has to be done before destroying history.
1514 place for anything that has to be done before destroying history.
1514
1515
1515 This is mostly useful for saving state that is in memory and waiting
1516 This is mostly useful for saving state that is in memory and waiting
1516 to be flushed when the current lock is released. Because a call to
1517 to be flushed when the current lock is released. Because a call to
1517 destroyed is imminent, the repo will be invalidated causing those
1518 destroyed is imminent, the repo will be invalidated causing those
1518 changes to stay in memory (waiting for the next unlock), or vanish
1519 changes to stay in memory (waiting for the next unlock), or vanish
1519 completely.
1520 completely.
1520 '''
1521 '''
1521 # When using the same lock to commit and strip, the phasecache is left
1522 # When using the same lock to commit and strip, the phasecache is left
1522 # dirty after committing. Then when we strip, the repo is invalidated,
1523 # dirty after committing. Then when we strip, the repo is invalidated,
1523 # causing those changes to disappear.
1524 # causing those changes to disappear.
1524 if '_phasecache' in vars(self):
1525 if '_phasecache' in vars(self):
1525 self._phasecache.write()
1526 self._phasecache.write()
1526
1527
1527 @unfilteredmethod
1528 @unfilteredmethod
1528 def destroyed(self):
1529 def destroyed(self):
1529 '''Inform the repository that nodes have been destroyed.
1530 '''Inform the repository that nodes have been destroyed.
1530 Intended for use by strip and rollback, so there's a common
1531 Intended for use by strip and rollback, so there's a common
1531 place for anything that has to be done after destroying history.
1532 place for anything that has to be done after destroying history.
1532 '''
1533 '''
1533 # When one tries to:
1534 # When one tries to:
1534 # 1) destroy nodes thus calling this method (e.g. strip)
1535 # 1) destroy nodes thus calling this method (e.g. strip)
1535 # 2) use phasecache somewhere (e.g. commit)
1536 # 2) use phasecache somewhere (e.g. commit)
1536 #
1537 #
1537 # then 2) will fail because the phasecache contains nodes that were
1538 # then 2) will fail because the phasecache contains nodes that were
1538 # removed. We can either remove phasecache from the filecache,
1539 # removed. We can either remove phasecache from the filecache,
1539 # causing it to reload next time it is accessed, or simply filter
1540 # causing it to reload next time it is accessed, or simply filter
1540 # the removed nodes now and write the updated cache.
1541 # the removed nodes now and write the updated cache.
1541 self._phasecache.filterunknown(self)
1542 self._phasecache.filterunknown(self)
1542 self._phasecache.write()
1543 self._phasecache.write()
1543
1544
1544 # update the 'served' branch cache to help read only server process
1545 # update the 'served' branch cache to help read only server process
1545 # Thanks to branchcache collaboration this is done from the nearest
1546 # Thanks to branchcache collaboration this is done from the nearest
1546 # filtered subset and it is expected to be fast.
1547 # filtered subset and it is expected to be fast.
1547 branchmap.updatecache(self.filtered('served'))
1548 branchmap.updatecache(self.filtered('served'))
1548
1549
1549 # Ensure the persistent tag cache is updated. Doing it now
1550 # Ensure the persistent tag cache is updated. Doing it now
1550 # means that the tag cache only has to worry about destroyed
1551 # means that the tag cache only has to worry about destroyed
1551 # heads immediately after a strip/rollback. That in turn
1552 # heads immediately after a strip/rollback. That in turn
1552 # guarantees that "cachetip == currenttip" (comparing both rev
1553 # guarantees that "cachetip == currenttip" (comparing both rev
1553 # and node) always means no nodes have been added or destroyed.
1554 # and node) always means no nodes have been added or destroyed.
1554
1555
1555 # XXX this is suboptimal when qrefresh'ing: we strip the current
1556 # XXX this is suboptimal when qrefresh'ing: we strip the current
1556 # head, refresh the tag cache, then immediately add a new head.
1557 # head, refresh the tag cache, then immediately add a new head.
1557 # But I think doing it this way is necessary for the "instant
1558 # But I think doing it this way is necessary for the "instant
1558 # tag cache retrieval" case to work.
1559 # tag cache retrieval" case to work.
1559 self.invalidate()
1560 self.invalidate()
1560
1561
1561 def walk(self, match, node=None):
1562 def walk(self, match, node=None):
1562 '''
1563 '''
1563 walk recursively through the directory tree or a given
1564 walk recursively through the directory tree or a given
1564 changeset, finding all files matched by the match
1565 changeset, finding all files matched by the match
1565 function
1566 function
1566 '''
1567 '''
1567 return self[node].walk(match)
1568 return self[node].walk(match)
1568
1569
1569 def status(self, node1='.', node2=None, match=None,
1570 def status(self, node1='.', node2=None, match=None,
1570 ignored=False, clean=False, unknown=False,
1571 ignored=False, clean=False, unknown=False,
1571 listsubrepos=False):
1572 listsubrepos=False):
1572 '''a convenience method that calls node1.status(node2)'''
1573 '''a convenience method that calls node1.status(node2)'''
1573 return self[node1].status(node2, match, ignored, clean, unknown,
1574 return self[node1].status(node2, match, ignored, clean, unknown,
1574 listsubrepos)
1575 listsubrepos)
1575
1576
1576 def heads(self, start=None):
1577 def heads(self, start=None):
1577 heads = self.changelog.heads(start)
1578 heads = self.changelog.heads(start)
1578 # sort the output in rev descending order
1579 # sort the output in rev descending order
1579 return sorted(heads, key=self.changelog.rev, reverse=True)
1580 return sorted(heads, key=self.changelog.rev, reverse=True)
1580
1581
1581 def branchheads(self, branch=None, start=None, closed=False):
1582 def branchheads(self, branch=None, start=None, closed=False):
1582 '''return a (possibly filtered) list of heads for the given branch
1583 '''return a (possibly filtered) list of heads for the given branch
1583
1584
1584 Heads are returned in topological order, from newest to oldest.
1585 Heads are returned in topological order, from newest to oldest.
1585 If branch is None, use the dirstate branch.
1586 If branch is None, use the dirstate branch.
1586 If start is not None, return only heads reachable from start.
1587 If start is not None, return only heads reachable from start.
1587 If closed is True, return heads that are marked as closed as well.
1588 If closed is True, return heads that are marked as closed as well.
1588 '''
1589 '''
1589 if branch is None:
1590 if branch is None:
1590 branch = self[None].branch()
1591 branch = self[None].branch()
1591 branches = self.branchmap()
1592 branches = self.branchmap()
1592 if branch not in branches:
1593 if branch not in branches:
1593 return []
1594 return []
1594 # the cache returns heads ordered lowest to highest
1595 # the cache returns heads ordered lowest to highest
1595 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1596 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1596 if start is not None:
1597 if start is not None:
1597 # filter out the heads that cannot be reached from startrev
1598 # filter out the heads that cannot be reached from startrev
1598 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1599 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1599 bheads = [h for h in bheads if h in fbheads]
1600 bheads = [h for h in bheads if h in fbheads]
1600 return bheads
1601 return bheads
1601
1602
1602 def branches(self, nodes):
1603 def branches(self, nodes):
1603 if not nodes:
1604 if not nodes:
1604 nodes = [self.changelog.tip()]
1605 nodes = [self.changelog.tip()]
1605 b = []
1606 b = []
1606 for n in nodes:
1607 for n in nodes:
1607 t = n
1608 t = n
1608 while True:
1609 while True:
1609 p = self.changelog.parents(n)
1610 p = self.changelog.parents(n)
1610 if p[1] != nullid or p[0] == nullid:
1611 if p[1] != nullid or p[0] == nullid:
1611 b.append((t, n, p[0], p[1]))
1612 b.append((t, n, p[0], p[1]))
1612 break
1613 break
1613 n = p[0]
1614 n = p[0]
1614 return b
1615 return b
1615
1616
1616 def between(self, pairs):
1617 def between(self, pairs):
1617 r = []
1618 r = []
1618
1619
1619 for top, bottom in pairs:
1620 for top, bottom in pairs:
1620 n, l, i = top, [], 0
1621 n, l, i = top, [], 0
1621 f = 1
1622 f = 1
1622
1623
1623 while n != bottom and n != nullid:
1624 while n != bottom and n != nullid:
1624 p = self.changelog.parents(n)[0]
1625 p = self.changelog.parents(n)[0]
1625 if i == f:
1626 if i == f:
1626 l.append(n)
1627 l.append(n)
1627 f = f * 2
1628 f = f * 2
1628 n = p
1629 n = p
1629 i += 1
1630 i += 1
1630
1631
1631 r.append(l)
1632 r.append(l)
1632
1633
1633 return r
1634 return r
1634
1635
1635 def checkpush(self, pushop):
1636 def checkpush(self, pushop):
1636 """Extensions can override this function if additional checks have
1637 """Extensions can override this function if additional checks have
1637 to be performed before pushing, or call it if they override push
1638 to be performed before pushing, or call it if they override push
1638 command.
1639 command.
1639 """
1640 """
1640 pass
1641 pass
1641
1642
1642 @unfilteredpropertycache
1643 @unfilteredpropertycache
1643 def prepushoutgoinghooks(self):
1644 def prepushoutgoinghooks(self):
1644 """Return util.hooks consists of "(repo, remote, outgoing)"
1645 """Return util.hooks consists of "(repo, remote, outgoing)"
1645 functions, which are called before pushing changesets.
1646 functions, which are called before pushing changesets.
1646 """
1647 """
1647 return util.hooks()
1648 return util.hooks()
1648
1649
1649 def stream_in(self, remote, requirements):
1650 def stream_in(self, remote, requirements):
1650 lock = self.lock()
1651 lock = self.lock()
1651 try:
1652 try:
1652 # Save remote branchmap. We will use it later
1653 # Save remote branchmap. We will use it later
1653 # to speed up branchcache creation
1654 # to speed up branchcache creation
1654 rbranchmap = None
1655 rbranchmap = None
1655 if remote.capable("branchmap"):
1656 if remote.capable("branchmap"):
1656 rbranchmap = remote.branchmap()
1657 rbranchmap = remote.branchmap()
1657
1658
1658 fp = remote.stream_out()
1659 fp = remote.stream_out()
1659 l = fp.readline()
1660 l = fp.readline()
1660 try:
1661 try:
1661 resp = int(l)
1662 resp = int(l)
1662 except ValueError:
1663 except ValueError:
1663 raise error.ResponseError(
1664 raise error.ResponseError(
1664 _('unexpected response from remote server:'), l)
1665 _('unexpected response from remote server:'), l)
1665 if resp == 1:
1666 if resp == 1:
1666 raise util.Abort(_('operation forbidden by server'))
1667 raise util.Abort(_('operation forbidden by server'))
1667 elif resp == 2:
1668 elif resp == 2:
1668 raise util.Abort(_('locking the remote repository failed'))
1669 raise util.Abort(_('locking the remote repository failed'))
1669 elif resp != 0:
1670 elif resp != 0:
1670 raise util.Abort(_('the server sent an unknown error code'))
1671 raise util.Abort(_('the server sent an unknown error code'))
1671 self.ui.status(_('streaming all changes\n'))
1672 self.ui.status(_('streaming all changes\n'))
1672 l = fp.readline()
1673 l = fp.readline()
1673 try:
1674 try:
1674 total_files, total_bytes = map(int, l.split(' ', 1))
1675 total_files, total_bytes = map(int, l.split(' ', 1))
1675 except (ValueError, TypeError):
1676 except (ValueError, TypeError):
1676 raise error.ResponseError(
1677 raise error.ResponseError(
1677 _('unexpected response from remote server:'), l)
1678 _('unexpected response from remote server:'), l)
1678 self.ui.status(_('%d files to transfer, %s of data\n') %
1679 self.ui.status(_('%d files to transfer, %s of data\n') %
1679 (total_files, util.bytecount(total_bytes)))
1680 (total_files, util.bytecount(total_bytes)))
1680 handled_bytes = 0
1681 handled_bytes = 0
1681 self.ui.progress(_('clone'), 0, total=total_bytes)
1682 self.ui.progress(_('clone'), 0, total=total_bytes)
1682 start = time.time()
1683 start = time.time()
1683
1684
1684 tr = self.transaction(_('clone'))
1685 tr = self.transaction(_('clone'))
1685 try:
1686 try:
1686 for i in xrange(total_files):
1687 for i in xrange(total_files):
1687 # XXX doesn't support '\n' or '\r' in filenames
1688 # XXX doesn't support '\n' or '\r' in filenames
1688 l = fp.readline()
1689 l = fp.readline()
1689 try:
1690 try:
1690 name, size = l.split('\0', 1)
1691 name, size = l.split('\0', 1)
1691 size = int(size)
1692 size = int(size)
1692 except (ValueError, TypeError):
1693 except (ValueError, TypeError):
1693 raise error.ResponseError(
1694 raise error.ResponseError(
1694 _('unexpected response from remote server:'), l)
1695 _('unexpected response from remote server:'), l)
1695 if self.ui.debugflag:
1696 if self.ui.debugflag:
1696 self.ui.debug('adding %s (%s)\n' %
1697 self.ui.debug('adding %s (%s)\n' %
1697 (name, util.bytecount(size)))
1698 (name, util.bytecount(size)))
1698 # for backwards compat, name was partially encoded
1699 # for backwards compat, name was partially encoded
1699 ofp = self.svfs(store.decodedir(name), 'w')
1700 ofp = self.svfs(store.decodedir(name), 'w')
1700 for chunk in util.filechunkiter(fp, limit=size):
1701 for chunk in util.filechunkiter(fp, limit=size):
1701 handled_bytes += len(chunk)
1702 handled_bytes += len(chunk)
1702 self.ui.progress(_('clone'), handled_bytes,
1703 self.ui.progress(_('clone'), handled_bytes,
1703 total=total_bytes)
1704 total=total_bytes)
1704 ofp.write(chunk)
1705 ofp.write(chunk)
1705 ofp.close()
1706 ofp.close()
1706 tr.close()
1707 tr.close()
1707 finally:
1708 finally:
1708 tr.release()
1709 tr.release()
1709
1710
1710 # Writing straight to files circumvented the inmemory caches
1711 # Writing straight to files circumvented the inmemory caches
1711 self.invalidate()
1712 self.invalidate()
1712
1713
1713 elapsed = time.time() - start
1714 elapsed = time.time() - start
1714 if elapsed <= 0:
1715 if elapsed <= 0:
1715 elapsed = 0.001
1716 elapsed = 0.001
1716 self.ui.progress(_('clone'), None)
1717 self.ui.progress(_('clone'), None)
1717 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1718 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1718 (util.bytecount(total_bytes), elapsed,
1719 (util.bytecount(total_bytes), elapsed,
1719 util.bytecount(total_bytes / elapsed)))
1720 util.bytecount(total_bytes / elapsed)))
1720
1721
1721 # new requirements = old non-format requirements +
1722 # new requirements = old non-format requirements +
1722 # new format-related
1723 # new format-related
1723 # requirements from the streamed-in repository
1724 # requirements from the streamed-in repository
1724 requirements.update(set(self.requirements) - self.supportedformats)
1725 requirements.update(set(self.requirements) - self.supportedformats)
1725 self._applyrequirements(requirements)
1726 self._applyrequirements(requirements)
1726 self._writerequirements()
1727 self._writerequirements()
1727
1728
1728 if rbranchmap:
1729 if rbranchmap:
1729 rbheads = []
1730 rbheads = []
1730 closed = []
1731 closed = []
1731 for bheads in rbranchmap.itervalues():
1732 for bheads in rbranchmap.itervalues():
1732 rbheads.extend(bheads)
1733 rbheads.extend(bheads)
1733 for h in bheads:
1734 for h in bheads:
1734 r = self.changelog.rev(h)
1735 r = self.changelog.rev(h)
1735 b, c = self.changelog.branchinfo(r)
1736 b, c = self.changelog.branchinfo(r)
1736 if c:
1737 if c:
1737 closed.append(h)
1738 closed.append(h)
1738
1739
1739 if rbheads:
1740 if rbheads:
1740 rtiprev = max((int(self.changelog.rev(node))
1741 rtiprev = max((int(self.changelog.rev(node))
1741 for node in rbheads))
1742 for node in rbheads))
1742 cache = branchmap.branchcache(rbranchmap,
1743 cache = branchmap.branchcache(rbranchmap,
1743 self[rtiprev].node(),
1744 self[rtiprev].node(),
1744 rtiprev,
1745 rtiprev,
1745 closednodes=closed)
1746 closednodes=closed)
1746 # Try to stick it as low as possible
1747 # Try to stick it as low as possible
1747 # filter above served are unlikely to be fetch from a clone
1748 # filter above served are unlikely to be fetch from a clone
1748 for candidate in ('base', 'immutable', 'served'):
1749 for candidate in ('base', 'immutable', 'served'):
1749 rview = self.filtered(candidate)
1750 rview = self.filtered(candidate)
1750 if cache.validfor(rview):
1751 if cache.validfor(rview):
1751 self._branchcaches[candidate] = cache
1752 self._branchcaches[candidate] = cache
1752 cache.write(rview)
1753 cache.write(rview)
1753 break
1754 break
1754 self.invalidate()
1755 self.invalidate()
1755 return len(self.heads()) + 1
1756 return len(self.heads()) + 1
1756 finally:
1757 finally:
1757 lock.release()
1758 lock.release()
1758
1759
1759 def clone(self, remote, heads=[], stream=None):
1760 def clone(self, remote, heads=[], stream=None):
1760 '''clone remote repository.
1761 '''clone remote repository.
1761
1762
1762 keyword arguments:
1763 keyword arguments:
1763 heads: list of revs to clone (forces use of pull)
1764 heads: list of revs to clone (forces use of pull)
1764 stream: use streaming clone if possible'''
1765 stream: use streaming clone if possible'''
1765
1766
1766 # now, all clients that can request uncompressed clones can
1767 # now, all clients that can request uncompressed clones can
1767 # read repo formats supported by all servers that can serve
1768 # read repo formats supported by all servers that can serve
1768 # them.
1769 # them.
1769
1770
1770 # if revlog format changes, client will have to check version
1771 # if revlog format changes, client will have to check version
1771 # and format flags on "stream" capability, and use
1772 # and format flags on "stream" capability, and use
1772 # uncompressed only if compatible.
1773 # uncompressed only if compatible.
1773
1774
1774 if stream is None:
1775 if stream is None:
1775 # if the server explicitly prefers to stream (for fast LANs)
1776 # if the server explicitly prefers to stream (for fast LANs)
1776 stream = remote.capable('stream-preferred')
1777 stream = remote.capable('stream-preferred')
1777
1778
1778 if stream and not heads:
1779 if stream and not heads:
1779 # 'stream' means remote revlog format is revlogv1 only
1780 # 'stream' means remote revlog format is revlogv1 only
1780 if remote.capable('stream'):
1781 if remote.capable('stream'):
1781 self.stream_in(remote, set(('revlogv1',)))
1782 self.stream_in(remote, set(('revlogv1',)))
1782 else:
1783 else:
1783 # otherwise, 'streamreqs' contains the remote revlog format
1784 # otherwise, 'streamreqs' contains the remote revlog format
1784 streamreqs = remote.capable('streamreqs')
1785 streamreqs = remote.capable('streamreqs')
1785 if streamreqs:
1786 if streamreqs:
1786 streamreqs = set(streamreqs.split(','))
1787 streamreqs = set(streamreqs.split(','))
1787 # if we support it, stream in and adjust our requirements
1788 # if we support it, stream in and adjust our requirements
1788 if not streamreqs - self.supportedformats:
1789 if not streamreqs - self.supportedformats:
1789 self.stream_in(remote, streamreqs)
1790 self.stream_in(remote, streamreqs)
1790
1791
1791 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1792 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1792 try:
1793 try:
1793 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1794 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1794 ret = exchange.pull(self, remote, heads).cgresult
1795 ret = exchange.pull(self, remote, heads).cgresult
1795 finally:
1796 finally:
1796 self.ui.restoreconfig(quiet)
1797 self.ui.restoreconfig(quiet)
1797 return ret
1798 return ret
1798
1799
1799 def pushkey(self, namespace, key, old, new):
1800 def pushkey(self, namespace, key, old, new):
1800 try:
1801 try:
1801 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1802 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1802 old=old, new=new)
1803 old=old, new=new)
1803 except error.HookAbort, exc:
1804 except error.HookAbort, exc:
1804 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1805 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1805 if exc.hint:
1806 if exc.hint:
1806 self.ui.write_err(_("(%s)\n") % exc.hint)
1807 self.ui.write_err(_("(%s)\n") % exc.hint)
1807 return False
1808 return False
1808 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1809 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1809 ret = pushkey.push(self, namespace, key, old, new)
1810 ret = pushkey.push(self, namespace, key, old, new)
1810 def runhook():
1811 def runhook():
1811 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1812 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1812 ret=ret)
1813 ret=ret)
1813 self._afterlock(runhook)
1814 self._afterlock(runhook)
1814 return ret
1815 return ret
1815
1816
1816 def listkeys(self, namespace):
1817 def listkeys(self, namespace):
1817 self.hook('prelistkeys', throw=True, namespace=namespace)
1818 self.hook('prelistkeys', throw=True, namespace=namespace)
1818 self.ui.debug('listing keys for "%s"\n' % namespace)
1819 self.ui.debug('listing keys for "%s"\n' % namespace)
1819 values = pushkey.list(self, namespace)
1820 values = pushkey.list(self, namespace)
1820 self.hook('listkeys', namespace=namespace, values=values)
1821 self.hook('listkeys', namespace=namespace, values=values)
1821 return values
1822 return values
1822
1823
1823 def debugwireargs(self, one, two, three=None, four=None, five=None):
1824 def debugwireargs(self, one, two, three=None, four=None, five=None):
1824 '''used to test argument passing over the wire'''
1825 '''used to test argument passing over the wire'''
1825 return "%s %s %s %s %s" % (one, two, three, four, five)
1826 return "%s %s %s %s %s" % (one, two, three, four, five)
1826
1827
1827 def savecommitmessage(self, text):
1828 def savecommitmessage(self, text):
1828 fp = self.vfs('last-message.txt', 'wb')
1829 fp = self.vfs('last-message.txt', 'wb')
1829 try:
1830 try:
1830 fp.write(text)
1831 fp.write(text)
1831 finally:
1832 finally:
1832 fp.close()
1833 fp.close()
1833 return self.pathto(fp.name[len(self.root) + 1:])
1834 return self.pathto(fp.name[len(self.root) + 1:])
1834
1835
1835 # used to avoid circular references so destructors work
1836 # used to avoid circular references so destructors work
1836 def aftertrans(files):
1837 def aftertrans(files):
1837 renamefiles = [tuple(t) for t in files]
1838 renamefiles = [tuple(t) for t in files]
1838 def a():
1839 def a():
1839 for vfs, src, dest in renamefiles:
1840 for vfs, src, dest in renamefiles:
1840 try:
1841 try:
1841 vfs.rename(src, dest)
1842 vfs.rename(src, dest)
1842 except OSError: # journal file does not yet exist
1843 except OSError: # journal file does not yet exist
1843 pass
1844 pass
1844 return a
1845 return a
1845
1846
1846 def undoname(fn):
1847 def undoname(fn):
1847 base, name = os.path.split(fn)
1848 base, name = os.path.split(fn)
1848 assert name.startswith('journal')
1849 assert name.startswith('journal')
1849 return os.path.join(base, name.replace('journal', 'undo', 1))
1850 return os.path.join(base, name.replace('journal', 'undo', 1))
1850
1851
1851 def instance(ui, path, create):
1852 def instance(ui, path, create):
1852 return localrepository(ui, util.urllocalpath(path), create)
1853 return localrepository(ui, util.urllocalpath(path), create)
1853
1854
1854 def islocal(path):
1855 def islocal(path):
1855 return True
1856 return True
@@ -1,838 +1,797
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [phases]
2 > [phases]
3 > # public changeset are not obsolete
3 > # public changeset are not obsolete
4 > publish=false
4 > publish=false
5 > [ui]
5 > [ui]
6 > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n"
6 > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n"
7 > EOF
7 > EOF
8 $ mkcommit() {
8 $ mkcommit() {
9 > echo "$1" > "$1"
9 > echo "$1" > "$1"
10 > hg add "$1"
10 > hg add "$1"
11 > hg ci -m "add $1"
11 > hg ci -m "add $1"
12 > }
12 > }
13 $ getid() {
13 $ getid() {
14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
15 > }
15 > }
16
16
17 $ cat > debugkeys.py <<EOF
17 $ cat > debugkeys.py <<EOF
18 > def reposetup(ui, repo):
18 > def reposetup(ui, repo):
19 > class debugkeysrepo(repo.__class__):
19 > class debugkeysrepo(repo.__class__):
20 > def listkeys(self, namespace):
20 > def listkeys(self, namespace):
21 > ui.write('listkeys %s\n' % (namespace,))
21 > ui.write('listkeys %s\n' % (namespace,))
22 > return super(debugkeysrepo, self).listkeys(namespace)
22 > return super(debugkeysrepo, self).listkeys(namespace)
23 >
23 >
24 > if repo.local():
24 > if repo.local():
25 > repo.__class__ = debugkeysrepo
25 > repo.__class__ = debugkeysrepo
26 > EOF
26 > EOF
27
27
28 $ hg init tmpa
28 $ hg init tmpa
29 $ cd tmpa
29 $ cd tmpa
30 $ mkcommit kill_me
30 $ mkcommit kill_me
31
31
32 Checking that the feature is properly disabled
32 Checking that the feature is properly disabled
33
33
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
35 abort: creating obsolete markers is not enabled on this repo
35 abort: creating obsolete markers is not enabled on this repo
36 [255]
36 [255]
37
37
38 Enabling it
38 Enabling it
39
39
40 $ cat >> $HGRCPATH << EOF
40 $ cat >> $HGRCPATH << EOF
41 > [experimental]
41 > [experimental]
42 > evolution=createmarkers,exchange
42 > evolution=createmarkers,exchange
43 > EOF
43 > EOF
44
44
45 Killing a single changeset without replacement
45 Killing a single changeset without replacement
46
46
47 $ hg debugobsolete 0
47 $ hg debugobsolete 0
48 abort: changeset references must be full hexadecimal node identifiers
48 abort: changeset references must be full hexadecimal node identifiers
49 [255]
49 [255]
50 $ hg debugobsolete '00'
50 $ hg debugobsolete '00'
51 abort: changeset references must be full hexadecimal node identifiers
51 abort: changeset references must be full hexadecimal node identifiers
52 [255]
52 [255]
53 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
53 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
54 $ hg debugobsolete
54 $ hg debugobsolete
55 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
55 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
56
56
57 (test that mercurial is not confused)
57 (test that mercurial is not confused)
58
58
59 $ hg up null --quiet # having 0 as parent prevents it to be hidden
59 $ hg up null --quiet # having 0 as parent prevents it to be hidden
60 $ hg tip
60 $ hg tip
61 -1:000000000000 (public) [tip ]
61 -1:000000000000 (public) [tip ]
62 $ hg up --hidden tip --quiet
62 $ hg up --hidden tip --quiet
63
63
64 Killing a single changeset with itself should fail
64 Killing a single changeset with itself should fail
65 (simple local safeguard)
65 (simple local safeguard)
66
66
67 $ hg debugobsolete `getid kill_me` `getid kill_me`
67 $ hg debugobsolete `getid kill_me` `getid kill_me`
68 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
68 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
69 [255]
69 [255]
70
70
71 $ cd ..
71 $ cd ..
72
72
73 Killing a single changeset with replacement
73 Killing a single changeset with replacement
74 (and testing the format option)
74 (and testing the format option)
75
75
76 $ hg init tmpb
76 $ hg init tmpb
77 $ cd tmpb
77 $ cd tmpb
78 $ mkcommit a
78 $ mkcommit a
79 $ mkcommit b
79 $ mkcommit b
80 $ mkcommit original_c
80 $ mkcommit original_c
81 $ hg up "desc('b')"
81 $ hg up "desc('b')"
82 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
82 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
83 $ mkcommit new_c
83 $ mkcommit new_c
84 created new head
84 created new head
85 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
85 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
86 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
86 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
87 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
87 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
88 2:245bde4270cd add original_c
88 2:245bde4270cd add original_c
89 $ hg debugrevlog -cd
89 $ hg debugrevlog -cd
90 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
90 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
91 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
91 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
92 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
92 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
93 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
93 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
94 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
94 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
95 $ hg debugobsolete
95 $ hg debugobsolete
96 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
96 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
97
97
98 (check for version number of the obsstore)
98 (check for version number of the obsstore)
99
99
100 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
100 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
101 \x00 (no-eol) (esc)
101 \x00 (no-eol) (esc)
102
102
103 do it again (it read the obsstore before adding new changeset)
103 do it again (it read the obsstore before adding new changeset)
104
104
105 $ hg up '.^'
105 $ hg up '.^'
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
107 $ mkcommit new_2_c
107 $ mkcommit new_2_c
108 created new head
108 created new head
109 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
109 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
110 $ hg debugobsolete
110 $ hg debugobsolete
111 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
111 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
112 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
112 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
113
113
114 Register two markers with a missing node
114 Register two markers with a missing node
115
115
116 $ hg up '.^'
116 $ hg up '.^'
117 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
117 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
118 $ mkcommit new_3_c
118 $ mkcommit new_3_c
119 created new head
119 created new head
120 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
120 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
121 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
121 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
122 $ hg debugobsolete
122 $ hg debugobsolete
123 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
123 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
124 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
124 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
125 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
125 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
126 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
126 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
127
127
128 Refuse pathological nullid successors
128 Refuse pathological nullid successors
129 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
129 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
130 transaction abort!
130 transaction abort!
131 rollback completed
131 rollback completed
132 abort: bad obsolescence marker detected: invalid successors nullid
132 abort: bad obsolescence marker detected: invalid successors nullid
133 [255]
133 [255]
134
134
135 Check that graphlog detect that a changeset is obsolete:
135 Check that graphlog detect that a changeset is obsolete:
136
136
137 $ hg log -G
137 $ hg log -G
138 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
138 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
139 |
139 |
140 o 1:7c3bad9141dc (draft) [ ] add b
140 o 1:7c3bad9141dc (draft) [ ] add b
141 |
141 |
142 o 0:1f0dee641bb7 (draft) [ ] add a
142 o 0:1f0dee641bb7 (draft) [ ] add a
143
143
144
144
145 check that heads does not report them
145 check that heads does not report them
146
146
147 $ hg heads
147 $ hg heads
148 5:5601fb93a350 (draft) [tip ] add new_3_c
148 5:5601fb93a350 (draft) [tip ] add new_3_c
149 $ hg heads --hidden
149 $ hg heads --hidden
150 5:5601fb93a350 (draft) [tip ] add new_3_c
150 5:5601fb93a350 (draft) [tip ] add new_3_c
151 4:ca819180edb9 (draft) [ ] add new_2_c
151 4:ca819180edb9 (draft) [ ] add new_2_c
152 3:cdbce2fbb163 (draft) [ ] add new_c
152 3:cdbce2fbb163 (draft) [ ] add new_c
153 2:245bde4270cd (draft) [ ] add original_c
153 2:245bde4270cd (draft) [ ] add original_c
154
154
155
155
156 check that summary does not report them
156 check that summary does not report them
157
157
158 $ hg init ../sink
158 $ hg init ../sink
159 $ echo '[paths]' >> .hg/hgrc
159 $ echo '[paths]' >> .hg/hgrc
160 $ echo 'default=../sink' >> .hg/hgrc
160 $ echo 'default=../sink' >> .hg/hgrc
161 $ hg summary --remote
161 $ hg summary --remote
162 parent: 5:5601fb93a350 tip
162 parent: 5:5601fb93a350 tip
163 add new_3_c
163 add new_3_c
164 branch: default
164 branch: default
165 commit: (clean)
165 commit: (clean)
166 update: (current)
166 update: (current)
167 remote: 3 outgoing
167 remote: 3 outgoing
168
168
169 $ hg summary --remote --hidden
169 $ hg summary --remote --hidden
170 parent: 5:5601fb93a350 tip
170 parent: 5:5601fb93a350 tip
171 add new_3_c
171 add new_3_c
172 branch: default
172 branch: default
173 commit: (clean)
173 commit: (clean)
174 update: 3 new changesets, 4 branch heads (merge)
174 update: 3 new changesets, 4 branch heads (merge)
175 remote: 3 outgoing
175 remote: 3 outgoing
176
176
177 check that various commands work well with filtering
177 check that various commands work well with filtering
178
178
179 $ hg tip
179 $ hg tip
180 5:5601fb93a350 (draft) [tip ] add new_3_c
180 5:5601fb93a350 (draft) [tip ] add new_3_c
181 $ hg log -r 6
181 $ hg log -r 6
182 abort: unknown revision '6'!
182 abort: unknown revision '6'!
183 [255]
183 [255]
184 $ hg log -r 4
184 $ hg log -r 4
185 abort: hidden revision '4'!
185 abort: hidden revision '4'!
186 (use --hidden to access hidden revisions)
186 (use --hidden to access hidden revisions)
187 [255]
187 [255]
188 $ hg debugrevspec 'rev(6)'
188 $ hg debugrevspec 'rev(6)'
189 $ hg debugrevspec 'rev(4)'
189 $ hg debugrevspec 'rev(4)'
190 $ hg debugrevspec 'null'
190 $ hg debugrevspec 'null'
191 -1
191 -1
192
192
193 Check that public changeset are not accounted as obsolete:
193 Check that public changeset are not accounted as obsolete:
194
194
195 $ hg --hidden phase --public 2
195 $ hg --hidden phase --public 2
196 $ hg log -G
196 $ hg log -G
197 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
197 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
198 |
198 |
199 | o 2:245bde4270cd (public) [ ] add original_c
199 | o 2:245bde4270cd (public) [ ] add original_c
200 |/
200 |/
201 o 1:7c3bad9141dc (public) [ ] add b
201 o 1:7c3bad9141dc (public) [ ] add b
202 |
202 |
203 o 0:1f0dee641bb7 (public) [ ] add a
203 o 0:1f0dee641bb7 (public) [ ] add a
204
204
205
205
206 And that bumped changeset are detected
206 And that bumped changeset are detected
207 --------------------------------------
207 --------------------------------------
208
208
209 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
209 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
210 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
210 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
211 the public changeset
211 the public changeset
212
212
213 $ hg log --hidden -r 'bumped()'
213 $ hg log --hidden -r 'bumped()'
214 5:5601fb93a350 (draft) [tip ] add new_3_c
214 5:5601fb93a350 (draft) [tip ] add new_3_c
215
215
216 And that we can't push bumped changeset
216 And that we can't push bumped changeset
217
217
218 $ hg push ../tmpa -r 0 --force #(make repo related)
218 $ hg push ../tmpa -r 0 --force #(make repo related)
219 pushing to ../tmpa
219 pushing to ../tmpa
220 searching for changes
220 searching for changes
221 warning: repository is unrelated
221 warning: repository is unrelated
222 adding changesets
222 adding changesets
223 adding manifests
223 adding manifests
224 adding file changes
224 adding file changes
225 added 1 changesets with 1 changes to 1 files (+1 heads)
225 added 1 changesets with 1 changes to 1 files (+1 heads)
226 $ hg push ../tmpa
226 $ hg push ../tmpa
227 pushing to ../tmpa
227 pushing to ../tmpa
228 searching for changes
228 searching for changes
229 abort: push includes bumped changeset: 5601fb93a350!
229 abort: push includes bumped changeset: 5601fb93a350!
230 [255]
230 [255]
231
231
232 Fixing "bumped" situation
232 Fixing "bumped" situation
233 We need to create a clone of 5 and add a special marker with a flag
233 We need to create a clone of 5 and add a special marker with a flag
234
234
235 $ hg up '5^'
235 $ hg up '5^'
236 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
236 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
237 $ hg revert -ar 5
237 $ hg revert -ar 5
238 adding new_3_c
238 adding new_3_c
239 $ hg ci -m 'add n3w_3_c'
239 $ hg ci -m 'add n3w_3_c'
240 created new head
240 created new head
241 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
241 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
242 $ hg log -r 'bumped()'
242 $ hg log -r 'bumped()'
243 $ hg log -G
243 $ hg log -G
244 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
244 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
245 |
245 |
246 | o 2:245bde4270cd (public) [ ] add original_c
246 | o 2:245bde4270cd (public) [ ] add original_c
247 |/
247 |/
248 o 1:7c3bad9141dc (public) [ ] add b
248 o 1:7c3bad9141dc (public) [ ] add b
249 |
249 |
250 o 0:1f0dee641bb7 (public) [ ] add a
250 o 0:1f0dee641bb7 (public) [ ] add a
251
251
252
252
253
253
254
254
255 $ cd ..
255 $ cd ..
256
256
257 Exchange Test
257 Exchange Test
258 ============================
258 ============================
259
259
260 Destination repo does not have any data
260 Destination repo does not have any data
261 ---------------------------------------
261 ---------------------------------------
262
262
263 Simple incoming test
263 Simple incoming test
264
264
265 $ hg init tmpc
265 $ hg init tmpc
266 $ cd tmpc
266 $ cd tmpc
267 $ hg incoming ../tmpb
267 $ hg incoming ../tmpb
268 comparing with ../tmpb
268 comparing with ../tmpb
269 0:1f0dee641bb7 (public) [ ] add a
269 0:1f0dee641bb7 (public) [ ] add a
270 1:7c3bad9141dc (public) [ ] add b
270 1:7c3bad9141dc (public) [ ] add b
271 2:245bde4270cd (public) [ ] add original_c
271 2:245bde4270cd (public) [ ] add original_c
272 6:6f9641995072 (draft) [tip ] add n3w_3_c
272 6:6f9641995072 (draft) [tip ] add n3w_3_c
273
273
274 Try to pull markers
274 Try to pull markers
275 (extinct changeset are excluded but marker are pushed)
275 (extinct changeset are excluded but marker are pushed)
276
276
277 $ hg pull ../tmpb
277 $ hg pull ../tmpb
278 pulling from ../tmpb
278 pulling from ../tmpb
279 requesting all changes
279 requesting all changes
280 adding changesets
280 adding changesets
281 adding manifests
281 adding manifests
282 adding file changes
282 adding file changes
283 added 4 changesets with 4 changes to 4 files (+1 heads)
283 added 4 changesets with 4 changes to 4 files (+1 heads)
284 (run 'hg heads' to see heads, 'hg merge' to merge)
284 (run 'hg heads' to see heads, 'hg merge' to merge)
285 $ hg debugobsolete
285 $ hg debugobsolete
286 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
286 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
287 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
287 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
288 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
288 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
289 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
289 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
290 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
290 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
291
291
292 Rollback//Transaction support
292 Rollback//Transaction support
293
293
294 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
294 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
295 $ hg debugobsolete
295 $ hg debugobsolete
296 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
296 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
297 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
297 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
298 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
298 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
299 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
299 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
300 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
300 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
301 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
301 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
302 $ hg rollback -n
302 $ hg rollback -n
303 repository tip rolled back to revision 3 (undo debugobsolete)
303 repository tip rolled back to revision 3 (undo debugobsolete)
304 $ hg rollback
304 $ hg rollback
305 repository tip rolled back to revision 3 (undo debugobsolete)
305 repository tip rolled back to revision 3 (undo debugobsolete)
306 $ hg debugobsolete
306 $ hg debugobsolete
307 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
307 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
308 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
308 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
309 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
309 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
310 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
310 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
311 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
311 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
312
312
313 $ cd ..
313 $ cd ..
314
314
315 Try to push markers
315 Try to push markers
316
316
317 $ hg init tmpd
317 $ hg init tmpd
318 $ hg -R tmpb push tmpd
318 $ hg -R tmpb push tmpd
319 pushing to tmpd
319 pushing to tmpd
320 searching for changes
320 searching for changes
321 adding changesets
321 adding changesets
322 adding manifests
322 adding manifests
323 adding file changes
323 adding file changes
324 added 4 changesets with 4 changes to 4 files (+1 heads)
324 added 4 changesets with 4 changes to 4 files (+1 heads)
325 $ hg -R tmpd debugobsolete | sort
325 $ hg -R tmpd debugobsolete | sort
326 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
326 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
327 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
327 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
328 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
328 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
329 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
329 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
330 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
330 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
331
331
332 Check obsolete keys are exchanged only if source has an obsolete store
332 Check obsolete keys are exchanged only if source has an obsolete store
333
333
334 $ hg init empty
334 $ hg init empty
335 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
335 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
336 pushing to tmpd
336 pushing to tmpd
337 listkeys phases
337 listkeys phases
338 listkeys bookmarks
338 listkeys bookmarks
339 no changes found
339 no changes found
340 listkeys phases
340 listkeys phases
341 [1]
341 [1]
342
342
343 clone support
343 clone support
344 (markers are copied and extinct changesets are included to allow hardlinks)
344 (markers are copied and extinct changesets are included to allow hardlinks)
345
345
346 $ hg clone tmpb clone-dest
346 $ hg clone tmpb clone-dest
347 updating to branch default
347 updating to branch default
348 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
348 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
349 $ hg -R clone-dest log -G --hidden
349 $ hg -R clone-dest log -G --hidden
350 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
350 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
351 |
351 |
352 | x 5:5601fb93a350 (draft) [ ] add new_3_c
352 | x 5:5601fb93a350 (draft) [ ] add new_3_c
353 |/
353 |/
354 | x 4:ca819180edb9 (draft) [ ] add new_2_c
354 | x 4:ca819180edb9 (draft) [ ] add new_2_c
355 |/
355 |/
356 | x 3:cdbce2fbb163 (draft) [ ] add new_c
356 | x 3:cdbce2fbb163 (draft) [ ] add new_c
357 |/
357 |/
358 | o 2:245bde4270cd (public) [ ] add original_c
358 | o 2:245bde4270cd (public) [ ] add original_c
359 |/
359 |/
360 o 1:7c3bad9141dc (public) [ ] add b
360 o 1:7c3bad9141dc (public) [ ] add b
361 |
361 |
362 o 0:1f0dee641bb7 (public) [ ] add a
362 o 0:1f0dee641bb7 (public) [ ] add a
363
363
364 $ hg -R clone-dest debugobsolete
364 $ hg -R clone-dest debugobsolete
365 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
365 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
366 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
366 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
367 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
367 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
368 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
368 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
369 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
369 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
370
370
371
371
372 Destination repo have existing data
372 Destination repo have existing data
373 ---------------------------------------
373 ---------------------------------------
374
374
375 On pull
375 On pull
376
376
377 $ hg init tmpe
377 $ hg init tmpe
378 $ cd tmpe
378 $ cd tmpe
379 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
379 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
380 $ hg pull ../tmpb
380 $ hg pull ../tmpb
381 pulling from ../tmpb
381 pulling from ../tmpb
382 requesting all changes
382 requesting all changes
383 adding changesets
383 adding changesets
384 adding manifests
384 adding manifests
385 adding file changes
385 adding file changes
386 added 4 changesets with 4 changes to 4 files (+1 heads)
386 added 4 changesets with 4 changes to 4 files (+1 heads)
387 (run 'hg heads' to see heads, 'hg merge' to merge)
387 (run 'hg heads' to see heads, 'hg merge' to merge)
388 $ hg debugobsolete
388 $ hg debugobsolete
389 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
389 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
390 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
390 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
391 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
391 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
392 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
392 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
393 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
393 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
394 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
394 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
395
395
396
396
397 On push
397 On push
398
398
399 $ hg push ../tmpc
399 $ hg push ../tmpc
400 pushing to ../tmpc
400 pushing to ../tmpc
401 searching for changes
401 searching for changes
402 no changes found
402 no changes found
403 [1]
403 [1]
404 $ hg -R ../tmpc debugobsolete
404 $ hg -R ../tmpc debugobsolete
405 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
405 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
406 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
406 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
407 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
407 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
408 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
408 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
409 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
409 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
410 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
410 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
411
411
412 detect outgoing obsolete and unstable
412 detect outgoing obsolete and unstable
413 ---------------------------------------
413 ---------------------------------------
414
414
415
415
416 $ hg log -G
416 $ hg log -G
417 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
417 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
418 |
418 |
419 | o 2:245bde4270cd (public) [ ] add original_c
419 | o 2:245bde4270cd (public) [ ] add original_c
420 |/
420 |/
421 o 1:7c3bad9141dc (public) [ ] add b
421 o 1:7c3bad9141dc (public) [ ] add b
422 |
422 |
423 o 0:1f0dee641bb7 (public) [ ] add a
423 o 0:1f0dee641bb7 (public) [ ] add a
424
424
425 $ hg up 'desc("n3w_3_c")'
425 $ hg up 'desc("n3w_3_c")'
426 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
426 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
427 $ mkcommit original_d
427 $ mkcommit original_d
428 $ mkcommit original_e
428 $ mkcommit original_e
429 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
429 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
430 $ hg debugobsolete | grep `getid original_d`
430 $ hg debugobsolete | grep `getid original_d`
431 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
431 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
432 $ hg log -r 'obsolete()'
432 $ hg log -r 'obsolete()'
433 4:94b33453f93b (draft) [ ] add original_d
433 4:94b33453f93b (draft) [ ] add original_d
434 $ hg log -G -r '::unstable()'
434 $ hg log -G -r '::unstable()'
435 @ 5:cda648ca50f5 (draft) [tip ] add original_e
435 @ 5:cda648ca50f5 (draft) [tip ] add original_e
436 |
436 |
437 x 4:94b33453f93b (draft) [ ] add original_d
437 x 4:94b33453f93b (draft) [ ] add original_d
438 |
438 |
439 o 3:6f9641995072 (draft) [ ] add n3w_3_c
439 o 3:6f9641995072 (draft) [ ] add n3w_3_c
440 |
440 |
441 o 1:7c3bad9141dc (public) [ ] add b
441 o 1:7c3bad9141dc (public) [ ] add b
442 |
442 |
443 o 0:1f0dee641bb7 (public) [ ] add a
443 o 0:1f0dee641bb7 (public) [ ] add a
444
444
445
445
446 refuse to push obsolete changeset
446 refuse to push obsolete changeset
447
447
448 $ hg push ../tmpc/ -r 'desc("original_d")'
448 $ hg push ../tmpc/ -r 'desc("original_d")'
449 pushing to ../tmpc/
449 pushing to ../tmpc/
450 searching for changes
450 searching for changes
451 abort: push includes obsolete changeset: 94b33453f93b!
451 abort: push includes obsolete changeset: 94b33453f93b!
452 [255]
452 [255]
453
453
454 refuse to push unstable changeset
454 refuse to push unstable changeset
455
455
456 $ hg push ../tmpc/
456 $ hg push ../tmpc/
457 pushing to ../tmpc/
457 pushing to ../tmpc/
458 searching for changes
458 searching for changes
459 abort: push includes unstable changeset: cda648ca50f5!
459 abort: push includes unstable changeset: cda648ca50f5!
460 [255]
460 [255]
461
461
462 Test that extinct changeset are properly detected
462 Test that extinct changeset are properly detected
463
463
464 $ hg log -r 'extinct()'
464 $ hg log -r 'extinct()'
465
465
466 Don't try to push extinct changeset
466 Don't try to push extinct changeset
467
467
468 $ hg init ../tmpf
468 $ hg init ../tmpf
469 $ hg out ../tmpf
469 $ hg out ../tmpf
470 comparing with ../tmpf
470 comparing with ../tmpf
471 searching for changes
471 searching for changes
472 0:1f0dee641bb7 (public) [ ] add a
472 0:1f0dee641bb7 (public) [ ] add a
473 1:7c3bad9141dc (public) [ ] add b
473 1:7c3bad9141dc (public) [ ] add b
474 2:245bde4270cd (public) [ ] add original_c
474 2:245bde4270cd (public) [ ] add original_c
475 3:6f9641995072 (draft) [ ] add n3w_3_c
475 3:6f9641995072 (draft) [ ] add n3w_3_c
476 4:94b33453f93b (draft) [ ] add original_d
476 4:94b33453f93b (draft) [ ] add original_d
477 5:cda648ca50f5 (draft) [tip ] add original_e
477 5:cda648ca50f5 (draft) [tip ] add original_e
478 $ hg push ../tmpf -f # -f because be push unstable too
478 $ hg push ../tmpf -f # -f because be push unstable too
479 pushing to ../tmpf
479 pushing to ../tmpf
480 searching for changes
480 searching for changes
481 adding changesets
481 adding changesets
482 adding manifests
482 adding manifests
483 adding file changes
483 adding file changes
484 added 6 changesets with 6 changes to 6 files (+1 heads)
484 added 6 changesets with 6 changes to 6 files (+1 heads)
485
485
486 no warning displayed
486 no warning displayed
487
487
488 $ hg push ../tmpf
488 $ hg push ../tmpf
489 pushing to ../tmpf
489 pushing to ../tmpf
490 searching for changes
490 searching for changes
491 no changes found
491 no changes found
492 [1]
492 [1]
493
493
494 Do not warn about new head when the new head is a successors of a remote one
494 Do not warn about new head when the new head is a successors of a remote one
495
495
496 $ hg log -G
496 $ hg log -G
497 @ 5:cda648ca50f5 (draft) [tip ] add original_e
497 @ 5:cda648ca50f5 (draft) [tip ] add original_e
498 |
498 |
499 x 4:94b33453f93b (draft) [ ] add original_d
499 x 4:94b33453f93b (draft) [ ] add original_d
500 |
500 |
501 o 3:6f9641995072 (draft) [ ] add n3w_3_c
501 o 3:6f9641995072 (draft) [ ] add n3w_3_c
502 |
502 |
503 | o 2:245bde4270cd (public) [ ] add original_c
503 | o 2:245bde4270cd (public) [ ] add original_c
504 |/
504 |/
505 o 1:7c3bad9141dc (public) [ ] add b
505 o 1:7c3bad9141dc (public) [ ] add b
506 |
506 |
507 o 0:1f0dee641bb7 (public) [ ] add a
507 o 0:1f0dee641bb7 (public) [ ] add a
508
508
509 $ hg up -q 'desc(n3w_3_c)'
509 $ hg up -q 'desc(n3w_3_c)'
510 $ mkcommit obsolete_e
510 $ mkcommit obsolete_e
511 created new head
511 created new head
512 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
512 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
513 $ hg outgoing ../tmpf # parasite hg outgoing testin
513 $ hg outgoing ../tmpf # parasite hg outgoing testin
514 comparing with ../tmpf
514 comparing with ../tmpf
515 searching for changes
515 searching for changes
516 6:3de5eca88c00 (draft) [tip ] add obsolete_e
516 6:3de5eca88c00 (draft) [tip ] add obsolete_e
517 $ hg push ../tmpf
517 $ hg push ../tmpf
518 pushing to ../tmpf
518 pushing to ../tmpf
519 searching for changes
519 searching for changes
520 adding changesets
520 adding changesets
521 adding manifests
521 adding manifests
522 adding file changes
522 adding file changes
523 added 1 changesets with 1 changes to 1 files (+1 heads)
523 added 1 changesets with 1 changes to 1 files (+1 heads)
524
524
525 test relevance computation
525 test relevance computation
526 ---------------------------------------
526 ---------------------------------------
527
527
528 Checking simple case of "marker relevance".
528 Checking simple case of "marker relevance".
529
529
530
530
531 Reminder of the repo situation
531 Reminder of the repo situation
532
532
533 $ hg log --hidden --graph
533 $ hg log --hidden --graph
534 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
534 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
535 |
535 |
536 | x 5:cda648ca50f5 (draft) [ ] add original_e
536 | x 5:cda648ca50f5 (draft) [ ] add original_e
537 | |
537 | |
538 | x 4:94b33453f93b (draft) [ ] add original_d
538 | x 4:94b33453f93b (draft) [ ] add original_d
539 |/
539 |/
540 o 3:6f9641995072 (draft) [ ] add n3w_3_c
540 o 3:6f9641995072 (draft) [ ] add n3w_3_c
541 |
541 |
542 | o 2:245bde4270cd (public) [ ] add original_c
542 | o 2:245bde4270cd (public) [ ] add original_c
543 |/
543 |/
544 o 1:7c3bad9141dc (public) [ ] add b
544 o 1:7c3bad9141dc (public) [ ] add b
545 |
545 |
546 o 0:1f0dee641bb7 (public) [ ] add a
546 o 0:1f0dee641bb7 (public) [ ] add a
547
547
548
548
549 List of all markers
549 List of all markers
550
550
551 $ hg debugobsolete
551 $ hg debugobsolete
552 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
552 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
553 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
553 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
554 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
554 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
555 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
555 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
556 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
556 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
557 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
557 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
558 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
558 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
559 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
559 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
560
560
561 List of changesets with no chain
561 List of changesets with no chain
562
562
563 $ hg debugobsolete --hidden --rev ::2
563 $ hg debugobsolete --hidden --rev ::2
564
564
565 List of changesets that are included on marker chain
565 List of changesets that are included on marker chain
566
566
567 $ hg debugobsolete --hidden --rev 6
567 $ hg debugobsolete --hidden --rev 6
568 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
568 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
569
569
570 List of changesets with a longer chain, (including a pruned children)
570 List of changesets with a longer chain, (including a pruned children)
571
571
572 $ hg debugobsolete --hidden --rev 3
572 $ hg debugobsolete --hidden --rev 3
573 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
573 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
574 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
574 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
575 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
575 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
576 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
576 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
577 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
577 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
578 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
578 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
579 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
579 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
580
580
581 List of both
581 List of both
582
582
583 $ hg debugobsolete --hidden --rev 3::6
583 $ hg debugobsolete --hidden --rev 3::6
584 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
584 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
585 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
585 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
586 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
586 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
587 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
587 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
588 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
588 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
589 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
589 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
590 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
590 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
591 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
591 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
592
592
593 #if serve
593 #if serve
594
594
595 check hgweb does not explode
595 check hgweb does not explode
596 ====================================
596 ====================================
597
597
598 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
598 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
599 adding changesets
599 adding changesets
600 adding manifests
600 adding manifests
601 adding file changes
601 adding file changes
602 added 62 changesets with 63 changes to 9 files (+60 heads)
602 added 62 changesets with 63 changes to 9 files (+60 heads)
603 (run 'hg heads .' to see heads, 'hg merge' to merge)
603 (run 'hg heads .' to see heads, 'hg merge' to merge)
604 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
604 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
605 > do
605 > do
606 > hg debugobsolete $node
606 > hg debugobsolete $node
607 > done
607 > done
608 $ hg up tip
608 $ hg up tip
609 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
609 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
610
610
611 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
611 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
612 $ cat hg.pid >> $DAEMON_PIDS
612 $ cat hg.pid >> $DAEMON_PIDS
613
613
614 check changelog view
614 check changelog view
615
615
616 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'shortlog/'
616 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'shortlog/'
617 200 Script output follows
617 200 Script output follows
618
618
619 check graph view
619 check graph view
620
620
621 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'graph'
621 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'graph'
622 200 Script output follows
622 200 Script output follows
623
623
624 check filelog view
624 check filelog view
625
625
626 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
626 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
627 200 Script output follows
627 200 Script output follows
628
628
629 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/68'
629 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/68'
630 200 Script output follows
630 200 Script output follows
631 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/67'
631 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/67'
632 404 Not Found
632 404 Not Found
633 [1]
633 [1]
634
634
635 check that web.view config option:
635 check that web.view config option:
636
636
637 $ "$TESTDIR/killdaemons.py" hg.pid
637 $ "$TESTDIR/killdaemons.py" hg.pid
638 $ cat >> .hg/hgrc << EOF
638 $ cat >> .hg/hgrc << EOF
639 > [web]
639 > [web]
640 > view=all
640 > view=all
641 > EOF
641 > EOF
642 $ wait
642 $ wait
643 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
643 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
644 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/67'
644 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/67'
645 200 Script output follows
645 200 Script output follows
646 $ "$TESTDIR/killdaemons.py" hg.pid
646 $ "$TESTDIR/killdaemons.py" hg.pid
647
647
648 Checking _enable=False warning if obsolete marker exists
648 Checking _enable=False warning if obsolete marker exists
649
649
650 $ echo '[experimental]' >> $HGRCPATH
650 $ echo '[experimental]' >> $HGRCPATH
651 $ echo "evolution=" >> $HGRCPATH
651 $ echo "evolution=" >> $HGRCPATH
652 $ hg log -r tip
652 $ hg log -r tip
653 obsolete feature not enabled but 68 markers found!
653 obsolete feature not enabled but 68 markers found!
654 68:c15e9edfca13 (draft) [tip ] add celestine
654 68:c15e9edfca13 (draft) [tip ] add celestine
655
655
656 reenable for later test
656 reenable for later test
657
657
658 $ echo '[experimental]' >> $HGRCPATH
658 $ echo '[experimental]' >> $HGRCPATH
659 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
659 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
660
660
661 #endif
661 #endif
662
662
663 Test incoming/outcoming with changesets obsoleted remotely, known locally
663 Test incoming/outcoming with changesets obsoleted remotely, known locally
664 ===============================================================================
664 ===============================================================================
665
665
666 This test issue 3805
666 This test issue 3805
667
667
668 $ hg init repo-issue3805
668 $ hg init repo-issue3805
669 $ cd repo-issue3805
669 $ cd repo-issue3805
670 $ echo "foo" > foo
670 $ echo "foo" > foo
671 $ hg ci -Am "A"
671 $ hg ci -Am "A"
672 adding foo
672 adding foo
673 $ hg clone . ../other-issue3805
673 $ hg clone . ../other-issue3805
674 updating to branch default
674 updating to branch default
675 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
675 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
676 $ echo "bar" >> foo
676 $ echo "bar" >> foo
677 $ hg ci --amend
677 $ hg ci --amend
678 $ cd ../other-issue3805
678 $ cd ../other-issue3805
679 $ hg log -G
679 $ hg log -G
680 @ 0:193e9254ce7e (draft) [tip ] A
680 @ 0:193e9254ce7e (draft) [tip ] A
681
681
682 $ hg log -G -R ../repo-issue3805
682 $ hg log -G -R ../repo-issue3805
683 @ 2:3816541e5485 (draft) [tip ] A
683 @ 2:3816541e5485 (draft) [tip ] A
684
684
685 $ hg incoming
685 $ hg incoming
686 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
686 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
687 searching for changes
687 searching for changes
688 2:3816541e5485 (draft) [tip ] A
688 2:3816541e5485 (draft) [tip ] A
689 $ hg incoming --bundle ../issue3805.hg
689 $ hg incoming --bundle ../issue3805.hg
690 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
690 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
691 searching for changes
691 searching for changes
692 2:3816541e5485 (draft) [tip ] A
692 2:3816541e5485 (draft) [tip ] A
693 $ hg outgoing
693 $ hg outgoing
694 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
694 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
695 searching for changes
695 searching for changes
696 no changes found
696 no changes found
697 [1]
697 [1]
698
698
699 #if serve
699 #if serve
700
700
701 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
701 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
702 $ cat hg.pid >> $DAEMON_PIDS
702 $ cat hg.pid >> $DAEMON_PIDS
703
703
704 $ hg incoming http://localhost:$HGPORT
704 $ hg incoming http://localhost:$HGPORT
705 comparing with http://localhost:$HGPORT/
705 comparing with http://localhost:$HGPORT/
706 searching for changes
706 searching for changes
707 1:3816541e5485 (draft) [tip ] A
707 1:3816541e5485 (draft) [tip ] A
708 $ hg outgoing http://localhost:$HGPORT
708 $ hg outgoing http://localhost:$HGPORT
709 comparing with http://localhost:$HGPORT/
709 comparing with http://localhost:$HGPORT/
710 searching for changes
710 searching for changes
711 no changes found
711 no changes found
712 [1]
712 [1]
713
713
714 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
714 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
715
715
716 #endif
716 #endif
717
717
718 This test issue 3814
718 This test issue 3814
719
719
720 (nothing to push but locally hidden changeset)
720 (nothing to push but locally hidden changeset)
721
721
722 $ cd ..
722 $ cd ..
723 $ hg init repo-issue3814
723 $ hg init repo-issue3814
724 $ cd repo-issue3805
724 $ cd repo-issue3805
725 $ hg push -r 3816541e5485 ../repo-issue3814
725 $ hg push -r 3816541e5485 ../repo-issue3814
726 pushing to ../repo-issue3814
726 pushing to ../repo-issue3814
727 searching for changes
727 searching for changes
728 adding changesets
728 adding changesets
729 adding manifests
729 adding manifests
730 adding file changes
730 adding file changes
731 added 1 changesets with 1 changes to 1 files
731 added 1 changesets with 1 changes to 1 files
732 $ hg out ../repo-issue3814
732 $ hg out ../repo-issue3814
733 comparing with ../repo-issue3814
733 comparing with ../repo-issue3814
734 searching for changes
734 searching for changes
735 no changes found
735 no changes found
736 [1]
736 [1]
737
737
738 Test that a local tag blocks a changeset from being hidden
738 Test that a local tag blocks a changeset from being hidden
739
739
740 $ hg tag -l visible -r 0 --hidden
740 $ hg tag -l visible -r 0 --hidden
741 $ hg log -G
741 $ hg log -G
742 @ 2:3816541e5485 (draft) [tip ] A
742 @ 2:3816541e5485 (draft) [tip ] A
743
743
744 x 0:193e9254ce7e (draft) [visible ] A
744 x 0:193e9254ce7e (draft) [visible ] A
745
745
746 Test that removing a local tag does not cause some commands to fail
746 Test that removing a local tag does not cause some commands to fail
747
747
748 $ hg tag -l -r tip tiptag
748 $ hg tag -l -r tip tiptag
749 $ hg tags
749 $ hg tags
750 tiptag 2:3816541e5485
750 tiptag 2:3816541e5485
751 tip 2:3816541e5485
751 tip 2:3816541e5485
752 visible 0:193e9254ce7e
752 visible 0:193e9254ce7e
753 $ hg --config extensions.strip= strip -r tip --no-backup
753 $ hg --config extensions.strip= strip -r tip --no-backup
754 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
754 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
755 $ hg tags
755 $ hg tags
756 visible 0:193e9254ce7e
756 visible 0:193e9254ce7e
757 tip 0:193e9254ce7e
757 tip 0:193e9254ce7e
758
758
759 #if serve
759 #if serve
760
760
761 Test issue 4506
761 Test issue 4506
762
762
763 $ cd ..
763 $ cd ..
764 $ hg init repo-issue4506
764 $ hg init repo-issue4506
765 $ cd repo-issue4506
765 $ cd repo-issue4506
766 $ echo "0" > foo
766 $ echo "0" > foo
767 $ hg add foo
767 $ hg add foo
768 $ hg ci -m "content-0"
768 $ hg ci -m "content-0"
769
769
770 $ hg up null
770 $ hg up null
771 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
771 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
772 $ echo "1" > bar
772 $ echo "1" > bar
773 $ hg add bar
773 $ hg add bar
774 $ hg ci -m "content-1"
774 $ hg ci -m "content-1"
775 created new head
775 created new head
776 $ hg up 0
776 $ hg up 0
777 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
777 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
778 $ hg graft 1
778 $ hg graft 1
779 grafting 1:1c9eddb02162 "content-1" (tip)
779 grafting 1:1c9eddb02162 "content-1" (tip)
780
780
781 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
781 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
782
782
783 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
783 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
784 $ cat hg.pid >> $DAEMON_PIDS
784 $ cat hg.pid >> $DAEMON_PIDS
785
785
786 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/1'
786 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/1'
787 404 Not Found
787 404 Not Found
788 [1]
788 [1]
789 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'file/tip/bar'
789 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'file/tip/bar'
790 200 Script output follows
790 200 Script output follows
791 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'annotate/tip/bar'
791 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'annotate/tip/bar'
792 200 Script output follows
792 200 Script output follows
793
793
794 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
794 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
795
795
796 #endif
796 #endif
797
797
798 $ hg init a
799 $ cd a
800 $ touch foo
801 $ hg add foo
802 $ hg ci -mfoo
803 $ touch bar
804 $ hg add bar
805 $ hg ci -mbar
806 $ hg up 0
807 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
808 $ touch quux
809 $ hg add quux
810 $ hg ci -m quux
811 created new head
812 $ hg up 1
813 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
814 $ hg tag 1.0
815
816 $ hg up 2
817 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
818 $ hg log -G
819 o 3:bc47fc7e1c1d (draft) [tip ] Added tag 1.0 for changeset 50c889141114
820 |
821 | @ 2:3d7f255a0081 (draft) [ ] quux
822 | |
823 o | 1:50c889141114 (draft) [1.0 ] bar
824 |/
825 o 0:1f7b0de80e11 (draft) [ ] foo
826
827 $ hg debugobsolete `getid bar`
828 $ hg debugobsolete `getid 1.0`
829 $ hg tag 1.0
830 $ hg log -G
831 @ 4:f9f2ab71ffd5 (draft) [tip ] Added tag 1.0 for changeset 3d7f255a0081
832 |
833 o 2:3d7f255a0081 (draft) [1.0 ] quux
834 |
835 o 0:1f7b0de80e11 (draft) [ ] foo
836
837 $ cat .hgtags
838 3d7f255a008103380aeb2a7d581fe257f40969e7 1.0
General Comments 0
You need to be logged in to leave comments. Login now