##// END OF EJS Templates
clone: properly mark branches closed with --uncompressed (issue4428)...
Matt Mackall -
r23126:8b4a8a91 stable
parent child Browse files
Show More
@@ -1,1793 +1,1800 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import hex, nullid, short
7 from node import hex, nullid, short
8 from i18n import _
8 from i18n import _
9 import urllib
9 import urllib
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 import lock as lockmod
12 import lock as lockmod
13 import transaction, store, encoding, exchange, bundle2
13 import transaction, store, encoding, exchange, bundle2
14 import scmutil, util, extensions, hook, error, revset
14 import scmutil, util, extensions, hook, error, revset
15 import match as matchmod
15 import match as matchmod
16 import merge as mergemod
16 import merge as mergemod
17 import tags as tagsmod
17 import tags as tagsmod
18 from lock import release
18 from lock import release
19 import weakref, errno, os, time, inspect
19 import weakref, errno, os, time, inspect
20 import branchmap, pathutil
20 import branchmap, pathutil
21 propertycache = util.propertycache
21 propertycache = util.propertycache
22 filecache = scmutil.filecache
22 filecache = scmutil.filecache
23
23
24 class repofilecache(filecache):
24 class repofilecache(filecache):
25 """All filecache usage on repo are done for logic that should be unfiltered
25 """All filecache usage on repo are done for logic that should be unfiltered
26 """
26 """
27
27
28 def __get__(self, repo, type=None):
28 def __get__(self, repo, type=None):
29 return super(repofilecache, self).__get__(repo.unfiltered(), type)
29 return super(repofilecache, self).__get__(repo.unfiltered(), type)
30 def __set__(self, repo, value):
30 def __set__(self, repo, value):
31 return super(repofilecache, self).__set__(repo.unfiltered(), value)
31 return super(repofilecache, self).__set__(repo.unfiltered(), value)
32 def __delete__(self, repo):
32 def __delete__(self, repo):
33 return super(repofilecache, self).__delete__(repo.unfiltered())
33 return super(repofilecache, self).__delete__(repo.unfiltered())
34
34
35 class storecache(repofilecache):
35 class storecache(repofilecache):
36 """filecache for files in the store"""
36 """filecache for files in the store"""
37 def join(self, obj, fname):
37 def join(self, obj, fname):
38 return obj.sjoin(fname)
38 return obj.sjoin(fname)
39
39
40 class unfilteredpropertycache(propertycache):
40 class unfilteredpropertycache(propertycache):
41 """propertycache that apply to unfiltered repo only"""
41 """propertycache that apply to unfiltered repo only"""
42
42
43 def __get__(self, repo, type=None):
43 def __get__(self, repo, type=None):
44 unfi = repo.unfiltered()
44 unfi = repo.unfiltered()
45 if unfi is repo:
45 if unfi is repo:
46 return super(unfilteredpropertycache, self).__get__(unfi)
46 return super(unfilteredpropertycache, self).__get__(unfi)
47 return getattr(unfi, self.name)
47 return getattr(unfi, self.name)
48
48
49 class filteredpropertycache(propertycache):
49 class filteredpropertycache(propertycache):
50 """propertycache that must take filtering in account"""
50 """propertycache that must take filtering in account"""
51
51
52 def cachevalue(self, obj, value):
52 def cachevalue(self, obj, value):
53 object.__setattr__(obj, self.name, value)
53 object.__setattr__(obj, self.name, value)
54
54
55
55
56 def hasunfilteredcache(repo, name):
56 def hasunfilteredcache(repo, name):
57 """check if a repo has an unfilteredpropertycache value for <name>"""
57 """check if a repo has an unfilteredpropertycache value for <name>"""
58 return name in vars(repo.unfiltered())
58 return name in vars(repo.unfiltered())
59
59
60 def unfilteredmethod(orig):
60 def unfilteredmethod(orig):
61 """decorate method that always need to be run on unfiltered version"""
61 """decorate method that always need to be run on unfiltered version"""
62 def wrapper(repo, *args, **kwargs):
62 def wrapper(repo, *args, **kwargs):
63 return orig(repo.unfiltered(), *args, **kwargs)
63 return orig(repo.unfiltered(), *args, **kwargs)
64 return wrapper
64 return wrapper
65
65
66 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
66 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
67 'unbundle'))
67 'unbundle'))
68 legacycaps = moderncaps.union(set(['changegroupsubset']))
68 legacycaps = moderncaps.union(set(['changegroupsubset']))
69
69
70 class localpeer(peer.peerrepository):
70 class localpeer(peer.peerrepository):
71 '''peer for a local repo; reflects only the most recent API'''
71 '''peer for a local repo; reflects only the most recent API'''
72
72
73 def __init__(self, repo, caps=moderncaps):
73 def __init__(self, repo, caps=moderncaps):
74 peer.peerrepository.__init__(self)
74 peer.peerrepository.__init__(self)
75 self._repo = repo.filtered('served')
75 self._repo = repo.filtered('served')
76 self.ui = repo.ui
76 self.ui = repo.ui
77 self._caps = repo._restrictcapabilities(caps)
77 self._caps = repo._restrictcapabilities(caps)
78 self.requirements = repo.requirements
78 self.requirements = repo.requirements
79 self.supportedformats = repo.supportedformats
79 self.supportedformats = repo.supportedformats
80
80
81 def close(self):
81 def close(self):
82 self._repo.close()
82 self._repo.close()
83
83
84 def _capabilities(self):
84 def _capabilities(self):
85 return self._caps
85 return self._caps
86
86
87 def local(self):
87 def local(self):
88 return self._repo
88 return self._repo
89
89
90 def canpush(self):
90 def canpush(self):
91 return True
91 return True
92
92
93 def url(self):
93 def url(self):
94 return self._repo.url()
94 return self._repo.url()
95
95
96 def lookup(self, key):
96 def lookup(self, key):
97 return self._repo.lookup(key)
97 return self._repo.lookup(key)
98
98
99 def branchmap(self):
99 def branchmap(self):
100 return self._repo.branchmap()
100 return self._repo.branchmap()
101
101
102 def heads(self):
102 def heads(self):
103 return self._repo.heads()
103 return self._repo.heads()
104
104
105 def known(self, nodes):
105 def known(self, nodes):
106 return self._repo.known(nodes)
106 return self._repo.known(nodes)
107
107
108 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
108 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
109 format='HG10', **kwargs):
109 format='HG10', **kwargs):
110 cg = exchange.getbundle(self._repo, source, heads=heads,
110 cg = exchange.getbundle(self._repo, source, heads=heads,
111 common=common, bundlecaps=bundlecaps, **kwargs)
111 common=common, bundlecaps=bundlecaps, **kwargs)
112 if bundlecaps is not None and 'HG2Y' in bundlecaps:
112 if bundlecaps is not None and 'HG2Y' in bundlecaps:
113 # When requesting a bundle2, getbundle returns a stream to make the
113 # When requesting a bundle2, getbundle returns a stream to make the
114 # wire level function happier. We need to build a proper object
114 # wire level function happier. We need to build a proper object
115 # from it in local peer.
115 # from it in local peer.
116 cg = bundle2.unbundle20(self.ui, cg)
116 cg = bundle2.unbundle20(self.ui, cg)
117 return cg
117 return cg
118
118
119 # TODO We might want to move the next two calls into legacypeer and add
119 # TODO We might want to move the next two calls into legacypeer and add
120 # unbundle instead.
120 # unbundle instead.
121
121
122 def unbundle(self, cg, heads, url):
122 def unbundle(self, cg, heads, url):
123 """apply a bundle on a repo
123 """apply a bundle on a repo
124
124
125 This function handles the repo locking itself."""
125 This function handles the repo locking itself."""
126 try:
126 try:
127 cg = exchange.readbundle(self.ui, cg, None)
127 cg = exchange.readbundle(self.ui, cg, None)
128 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
128 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
129 if util.safehasattr(ret, 'getchunks'):
129 if util.safehasattr(ret, 'getchunks'):
130 # This is a bundle20 object, turn it into an unbundler.
130 # This is a bundle20 object, turn it into an unbundler.
131 # This little dance should be dropped eventually when the API
131 # This little dance should be dropped eventually when the API
132 # is finally improved.
132 # is finally improved.
133 stream = util.chunkbuffer(ret.getchunks())
133 stream = util.chunkbuffer(ret.getchunks())
134 ret = bundle2.unbundle20(self.ui, stream)
134 ret = bundle2.unbundle20(self.ui, stream)
135 return ret
135 return ret
136 except error.PushRaced, exc:
136 except error.PushRaced, exc:
137 raise error.ResponseError(_('push failed:'), str(exc))
137 raise error.ResponseError(_('push failed:'), str(exc))
138
138
139 def lock(self):
139 def lock(self):
140 return self._repo.lock()
140 return self._repo.lock()
141
141
142 def addchangegroup(self, cg, source, url):
142 def addchangegroup(self, cg, source, url):
143 return changegroup.addchangegroup(self._repo, cg, source, url)
143 return changegroup.addchangegroup(self._repo, cg, source, url)
144
144
145 def pushkey(self, namespace, key, old, new):
145 def pushkey(self, namespace, key, old, new):
146 return self._repo.pushkey(namespace, key, old, new)
146 return self._repo.pushkey(namespace, key, old, new)
147
147
148 def listkeys(self, namespace):
148 def listkeys(self, namespace):
149 return self._repo.listkeys(namespace)
149 return self._repo.listkeys(namespace)
150
150
151 def debugwireargs(self, one, two, three=None, four=None, five=None):
151 def debugwireargs(self, one, two, three=None, four=None, five=None):
152 '''used to test argument passing over the wire'''
152 '''used to test argument passing over the wire'''
153 return "%s %s %s %s %s" % (one, two, three, four, five)
153 return "%s %s %s %s %s" % (one, two, three, four, five)
154
154
155 class locallegacypeer(localpeer):
155 class locallegacypeer(localpeer):
156 '''peer extension which implements legacy methods too; used for tests with
156 '''peer extension which implements legacy methods too; used for tests with
157 restricted capabilities'''
157 restricted capabilities'''
158
158
159 def __init__(self, repo):
159 def __init__(self, repo):
160 localpeer.__init__(self, repo, caps=legacycaps)
160 localpeer.__init__(self, repo, caps=legacycaps)
161
161
162 def branches(self, nodes):
162 def branches(self, nodes):
163 return self._repo.branches(nodes)
163 return self._repo.branches(nodes)
164
164
165 def between(self, pairs):
165 def between(self, pairs):
166 return self._repo.between(pairs)
166 return self._repo.between(pairs)
167
167
168 def changegroup(self, basenodes, source):
168 def changegroup(self, basenodes, source):
169 return changegroup.changegroup(self._repo, basenodes, source)
169 return changegroup.changegroup(self._repo, basenodes, source)
170
170
171 def changegroupsubset(self, bases, heads, source):
171 def changegroupsubset(self, bases, heads, source):
172 return changegroup.changegroupsubset(self._repo, bases, heads, source)
172 return changegroup.changegroupsubset(self._repo, bases, heads, source)
173
173
174 class localrepository(object):
174 class localrepository(object):
175
175
176 supportedformats = set(('revlogv1', 'generaldelta'))
176 supportedformats = set(('revlogv1', 'generaldelta'))
177 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
177 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
178 'dotencode'))
178 'dotencode'))
179 openerreqs = set(('revlogv1', 'generaldelta'))
179 openerreqs = set(('revlogv1', 'generaldelta'))
180 requirements = ['revlogv1']
180 requirements = ['revlogv1']
181 filtername = None
181 filtername = None
182
182
183 # a list of (ui, featureset) functions.
183 # a list of (ui, featureset) functions.
184 # only functions defined in module of enabled extensions are invoked
184 # only functions defined in module of enabled extensions are invoked
185 featuresetupfuncs = set()
185 featuresetupfuncs = set()
186
186
187 def _baserequirements(self, create):
187 def _baserequirements(self, create):
188 return self.requirements[:]
188 return self.requirements[:]
189
189
190 def __init__(self, baseui, path=None, create=False):
190 def __init__(self, baseui, path=None, create=False):
191 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
191 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
192 self.wopener = self.wvfs
192 self.wopener = self.wvfs
193 self.root = self.wvfs.base
193 self.root = self.wvfs.base
194 self.path = self.wvfs.join(".hg")
194 self.path = self.wvfs.join(".hg")
195 self.origroot = path
195 self.origroot = path
196 self.auditor = pathutil.pathauditor(self.root, self._checknested)
196 self.auditor = pathutil.pathauditor(self.root, self._checknested)
197 self.vfs = scmutil.vfs(self.path)
197 self.vfs = scmutil.vfs(self.path)
198 self.opener = self.vfs
198 self.opener = self.vfs
199 self.baseui = baseui
199 self.baseui = baseui
200 self.ui = baseui.copy()
200 self.ui = baseui.copy()
201 self.ui.copy = baseui.copy # prevent copying repo configuration
201 self.ui.copy = baseui.copy # prevent copying repo configuration
202 # A list of callback to shape the phase if no data were found.
202 # A list of callback to shape the phase if no data were found.
203 # Callback are in the form: func(repo, roots) --> processed root.
203 # Callback are in the form: func(repo, roots) --> processed root.
204 # This list it to be filled by extension during repo setup
204 # This list it to be filled by extension during repo setup
205 self._phasedefaults = []
205 self._phasedefaults = []
206 try:
206 try:
207 self.ui.readconfig(self.join("hgrc"), self.root)
207 self.ui.readconfig(self.join("hgrc"), self.root)
208 extensions.loadall(self.ui)
208 extensions.loadall(self.ui)
209 except IOError:
209 except IOError:
210 pass
210 pass
211
211
212 if self.featuresetupfuncs:
212 if self.featuresetupfuncs:
213 self.supported = set(self._basesupported) # use private copy
213 self.supported = set(self._basesupported) # use private copy
214 extmods = set(m.__name__ for n, m
214 extmods = set(m.__name__ for n, m
215 in extensions.extensions(self.ui))
215 in extensions.extensions(self.ui))
216 for setupfunc in self.featuresetupfuncs:
216 for setupfunc in self.featuresetupfuncs:
217 if setupfunc.__module__ in extmods:
217 if setupfunc.__module__ in extmods:
218 setupfunc(self.ui, self.supported)
218 setupfunc(self.ui, self.supported)
219 else:
219 else:
220 self.supported = self._basesupported
220 self.supported = self._basesupported
221
221
222 if not self.vfs.isdir():
222 if not self.vfs.isdir():
223 if create:
223 if create:
224 if not self.wvfs.exists():
224 if not self.wvfs.exists():
225 self.wvfs.makedirs()
225 self.wvfs.makedirs()
226 self.vfs.makedir(notindexed=True)
226 self.vfs.makedir(notindexed=True)
227 requirements = self._baserequirements(create)
227 requirements = self._baserequirements(create)
228 if self.ui.configbool('format', 'usestore', True):
228 if self.ui.configbool('format', 'usestore', True):
229 self.vfs.mkdir("store")
229 self.vfs.mkdir("store")
230 requirements.append("store")
230 requirements.append("store")
231 if self.ui.configbool('format', 'usefncache', True):
231 if self.ui.configbool('format', 'usefncache', True):
232 requirements.append("fncache")
232 requirements.append("fncache")
233 if self.ui.configbool('format', 'dotencode', True):
233 if self.ui.configbool('format', 'dotencode', True):
234 requirements.append('dotencode')
234 requirements.append('dotencode')
235 # create an invalid changelog
235 # create an invalid changelog
236 self.vfs.append(
236 self.vfs.append(
237 "00changelog.i",
237 "00changelog.i",
238 '\0\0\0\2' # represents revlogv2
238 '\0\0\0\2' # represents revlogv2
239 ' dummy changelog to prevent using the old repo layout'
239 ' dummy changelog to prevent using the old repo layout'
240 )
240 )
241 if self.ui.configbool('format', 'generaldelta', False):
241 if self.ui.configbool('format', 'generaldelta', False):
242 requirements.append("generaldelta")
242 requirements.append("generaldelta")
243 requirements = set(requirements)
243 requirements = set(requirements)
244 else:
244 else:
245 raise error.RepoError(_("repository %s not found") % path)
245 raise error.RepoError(_("repository %s not found") % path)
246 elif create:
246 elif create:
247 raise error.RepoError(_("repository %s already exists") % path)
247 raise error.RepoError(_("repository %s already exists") % path)
248 else:
248 else:
249 try:
249 try:
250 requirements = scmutil.readrequires(self.vfs, self.supported)
250 requirements = scmutil.readrequires(self.vfs, self.supported)
251 except IOError, inst:
251 except IOError, inst:
252 if inst.errno != errno.ENOENT:
252 if inst.errno != errno.ENOENT:
253 raise
253 raise
254 requirements = set()
254 requirements = set()
255
255
256 self.sharedpath = self.path
256 self.sharedpath = self.path
257 try:
257 try:
258 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
258 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
259 realpath=True)
259 realpath=True)
260 s = vfs.base
260 s = vfs.base
261 if not vfs.exists():
261 if not vfs.exists():
262 raise error.RepoError(
262 raise error.RepoError(
263 _('.hg/sharedpath points to nonexistent directory %s') % s)
263 _('.hg/sharedpath points to nonexistent directory %s') % s)
264 self.sharedpath = s
264 self.sharedpath = s
265 except IOError, inst:
265 except IOError, inst:
266 if inst.errno != errno.ENOENT:
266 if inst.errno != errno.ENOENT:
267 raise
267 raise
268
268
269 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
269 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
270 self.spath = self.store.path
270 self.spath = self.store.path
271 self.svfs = self.store.vfs
271 self.svfs = self.store.vfs
272 self.sopener = self.svfs
272 self.sopener = self.svfs
273 self.sjoin = self.store.join
273 self.sjoin = self.store.join
274 self.vfs.createmode = self.store.createmode
274 self.vfs.createmode = self.store.createmode
275 self._applyrequirements(requirements)
275 self._applyrequirements(requirements)
276 if create:
276 if create:
277 self._writerequirements()
277 self._writerequirements()
278
278
279
279
280 self._branchcaches = {}
280 self._branchcaches = {}
281 self.filterpats = {}
281 self.filterpats = {}
282 self._datafilters = {}
282 self._datafilters = {}
283 self._transref = self._lockref = self._wlockref = None
283 self._transref = self._lockref = self._wlockref = None
284
284
285 # A cache for various files under .hg/ that tracks file changes,
285 # A cache for various files under .hg/ that tracks file changes,
286 # (used by the filecache decorator)
286 # (used by the filecache decorator)
287 #
287 #
288 # Maps a property name to its util.filecacheentry
288 # Maps a property name to its util.filecacheentry
289 self._filecache = {}
289 self._filecache = {}
290
290
291 # hold sets of revision to be filtered
291 # hold sets of revision to be filtered
292 # should be cleared when something might have changed the filter value:
292 # should be cleared when something might have changed the filter value:
293 # - new changesets,
293 # - new changesets,
294 # - phase change,
294 # - phase change,
295 # - new obsolescence marker,
295 # - new obsolescence marker,
296 # - working directory parent change,
296 # - working directory parent change,
297 # - bookmark changes
297 # - bookmark changes
298 self.filteredrevcache = {}
298 self.filteredrevcache = {}
299
299
300 def close(self):
300 def close(self):
301 pass
301 pass
302
302
303 def _restrictcapabilities(self, caps):
303 def _restrictcapabilities(self, caps):
304 # bundle2 is not ready for prime time, drop it unless explicitly
304 # bundle2 is not ready for prime time, drop it unless explicitly
305 # required by the tests (or some brave tester)
305 # required by the tests (or some brave tester)
306 if self.ui.configbool('experimental', 'bundle2-exp', False):
306 if self.ui.configbool('experimental', 'bundle2-exp', False):
307 caps = set(caps)
307 caps = set(caps)
308 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
308 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
309 caps.add('bundle2-exp=' + urllib.quote(capsblob))
309 caps.add('bundle2-exp=' + urllib.quote(capsblob))
310 return caps
310 return caps
311
311
312 def _applyrequirements(self, requirements):
312 def _applyrequirements(self, requirements):
313 self.requirements = requirements
313 self.requirements = requirements
314 self.sopener.options = dict((r, 1) for r in requirements
314 self.sopener.options = dict((r, 1) for r in requirements
315 if r in self.openerreqs)
315 if r in self.openerreqs)
316 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
316 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
317 if chunkcachesize is not None:
317 if chunkcachesize is not None:
318 self.sopener.options['chunkcachesize'] = chunkcachesize
318 self.sopener.options['chunkcachesize'] = chunkcachesize
319
319
320 def _writerequirements(self):
320 def _writerequirements(self):
321 reqfile = self.opener("requires", "w")
321 reqfile = self.opener("requires", "w")
322 for r in sorted(self.requirements):
322 for r in sorted(self.requirements):
323 reqfile.write("%s\n" % r)
323 reqfile.write("%s\n" % r)
324 reqfile.close()
324 reqfile.close()
325
325
326 def _checknested(self, path):
326 def _checknested(self, path):
327 """Determine if path is a legal nested repository."""
327 """Determine if path is a legal nested repository."""
328 if not path.startswith(self.root):
328 if not path.startswith(self.root):
329 return False
329 return False
330 subpath = path[len(self.root) + 1:]
330 subpath = path[len(self.root) + 1:]
331 normsubpath = util.pconvert(subpath)
331 normsubpath = util.pconvert(subpath)
332
332
333 # XXX: Checking against the current working copy is wrong in
333 # XXX: Checking against the current working copy is wrong in
334 # the sense that it can reject things like
334 # the sense that it can reject things like
335 #
335 #
336 # $ hg cat -r 10 sub/x.txt
336 # $ hg cat -r 10 sub/x.txt
337 #
337 #
338 # if sub/ is no longer a subrepository in the working copy
338 # if sub/ is no longer a subrepository in the working copy
339 # parent revision.
339 # parent revision.
340 #
340 #
341 # However, it can of course also allow things that would have
341 # However, it can of course also allow things that would have
342 # been rejected before, such as the above cat command if sub/
342 # been rejected before, such as the above cat command if sub/
343 # is a subrepository now, but was a normal directory before.
343 # is a subrepository now, but was a normal directory before.
344 # The old path auditor would have rejected by mistake since it
344 # The old path auditor would have rejected by mistake since it
345 # panics when it sees sub/.hg/.
345 # panics when it sees sub/.hg/.
346 #
346 #
347 # All in all, checking against the working copy seems sensible
347 # All in all, checking against the working copy seems sensible
348 # since we want to prevent access to nested repositories on
348 # since we want to prevent access to nested repositories on
349 # the filesystem *now*.
349 # the filesystem *now*.
350 ctx = self[None]
350 ctx = self[None]
351 parts = util.splitpath(subpath)
351 parts = util.splitpath(subpath)
352 while parts:
352 while parts:
353 prefix = '/'.join(parts)
353 prefix = '/'.join(parts)
354 if prefix in ctx.substate:
354 if prefix in ctx.substate:
355 if prefix == normsubpath:
355 if prefix == normsubpath:
356 return True
356 return True
357 else:
357 else:
358 sub = ctx.sub(prefix)
358 sub = ctx.sub(prefix)
359 return sub.checknested(subpath[len(prefix) + 1:])
359 return sub.checknested(subpath[len(prefix) + 1:])
360 else:
360 else:
361 parts.pop()
361 parts.pop()
362 return False
362 return False
363
363
364 def peer(self):
364 def peer(self):
365 return localpeer(self) # not cached to avoid reference cycle
365 return localpeer(self) # not cached to avoid reference cycle
366
366
367 def unfiltered(self):
367 def unfiltered(self):
368 """Return unfiltered version of the repository
368 """Return unfiltered version of the repository
369
369
370 Intended to be overwritten by filtered repo."""
370 Intended to be overwritten by filtered repo."""
371 return self
371 return self
372
372
373 def filtered(self, name):
373 def filtered(self, name):
374 """Return a filtered version of a repository"""
374 """Return a filtered version of a repository"""
375 # build a new class with the mixin and the current class
375 # build a new class with the mixin and the current class
376 # (possibly subclass of the repo)
376 # (possibly subclass of the repo)
377 class proxycls(repoview.repoview, self.unfiltered().__class__):
377 class proxycls(repoview.repoview, self.unfiltered().__class__):
378 pass
378 pass
379 return proxycls(self, name)
379 return proxycls(self, name)
380
380
381 @repofilecache('bookmarks')
381 @repofilecache('bookmarks')
382 def _bookmarks(self):
382 def _bookmarks(self):
383 return bookmarks.bmstore(self)
383 return bookmarks.bmstore(self)
384
384
385 @repofilecache('bookmarks.current')
385 @repofilecache('bookmarks.current')
386 def _bookmarkcurrent(self):
386 def _bookmarkcurrent(self):
387 return bookmarks.readcurrent(self)
387 return bookmarks.readcurrent(self)
388
388
389 def bookmarkheads(self, bookmark):
389 def bookmarkheads(self, bookmark):
390 name = bookmark.split('@', 1)[0]
390 name = bookmark.split('@', 1)[0]
391 heads = []
391 heads = []
392 for mark, n in self._bookmarks.iteritems():
392 for mark, n in self._bookmarks.iteritems():
393 if mark.split('@', 1)[0] == name:
393 if mark.split('@', 1)[0] == name:
394 heads.append(n)
394 heads.append(n)
395 return heads
395 return heads
396
396
397 @storecache('phaseroots')
397 @storecache('phaseroots')
398 def _phasecache(self):
398 def _phasecache(self):
399 return phases.phasecache(self, self._phasedefaults)
399 return phases.phasecache(self, self._phasedefaults)
400
400
401 @storecache('obsstore')
401 @storecache('obsstore')
402 def obsstore(self):
402 def obsstore(self):
403 # read default format for new obsstore.
403 # read default format for new obsstore.
404 defaultformat = self.ui.configint('format', 'obsstore-version', None)
404 defaultformat = self.ui.configint('format', 'obsstore-version', None)
405 # rely on obsstore class default when possible.
405 # rely on obsstore class default when possible.
406 kwargs = {}
406 kwargs = {}
407 if defaultformat is not None:
407 if defaultformat is not None:
408 kwargs['defaultformat'] = defaultformat
408 kwargs['defaultformat'] = defaultformat
409 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
409 readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
410 store = obsolete.obsstore(self.sopener, readonly=readonly,
410 store = obsolete.obsstore(self.sopener, readonly=readonly,
411 **kwargs)
411 **kwargs)
412 if store and readonly:
412 if store and readonly:
413 # message is rare enough to not be translated
413 # message is rare enough to not be translated
414 msg = 'obsolete feature not enabled but %i markers found!\n'
414 msg = 'obsolete feature not enabled but %i markers found!\n'
415 self.ui.warn(msg % len(list(store)))
415 self.ui.warn(msg % len(list(store)))
416 return store
416 return store
417
417
418 @storecache('00changelog.i')
418 @storecache('00changelog.i')
419 def changelog(self):
419 def changelog(self):
420 c = changelog.changelog(self.sopener)
420 c = changelog.changelog(self.sopener)
421 if 'HG_PENDING' in os.environ:
421 if 'HG_PENDING' in os.environ:
422 p = os.environ['HG_PENDING']
422 p = os.environ['HG_PENDING']
423 if p.startswith(self.root):
423 if p.startswith(self.root):
424 c.readpending('00changelog.i.a')
424 c.readpending('00changelog.i.a')
425 return c
425 return c
426
426
427 @storecache('00manifest.i')
427 @storecache('00manifest.i')
428 def manifest(self):
428 def manifest(self):
429 return manifest.manifest(self.sopener)
429 return manifest.manifest(self.sopener)
430
430
431 @repofilecache('dirstate')
431 @repofilecache('dirstate')
432 def dirstate(self):
432 def dirstate(self):
433 warned = [0]
433 warned = [0]
434 def validate(node):
434 def validate(node):
435 try:
435 try:
436 self.changelog.rev(node)
436 self.changelog.rev(node)
437 return node
437 return node
438 except error.LookupError:
438 except error.LookupError:
439 if not warned[0]:
439 if not warned[0]:
440 warned[0] = True
440 warned[0] = True
441 self.ui.warn(_("warning: ignoring unknown"
441 self.ui.warn(_("warning: ignoring unknown"
442 " working parent %s!\n") % short(node))
442 " working parent %s!\n") % short(node))
443 return nullid
443 return nullid
444
444
445 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
445 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
446
446
447 def __getitem__(self, changeid):
447 def __getitem__(self, changeid):
448 if changeid is None:
448 if changeid is None:
449 return context.workingctx(self)
449 return context.workingctx(self)
450 return context.changectx(self, changeid)
450 return context.changectx(self, changeid)
451
451
452 def __contains__(self, changeid):
452 def __contains__(self, changeid):
453 try:
453 try:
454 return bool(self.lookup(changeid))
454 return bool(self.lookup(changeid))
455 except error.RepoLookupError:
455 except error.RepoLookupError:
456 return False
456 return False
457
457
458 def __nonzero__(self):
458 def __nonzero__(self):
459 return True
459 return True
460
460
461 def __len__(self):
461 def __len__(self):
462 return len(self.changelog)
462 return len(self.changelog)
463
463
464 def __iter__(self):
464 def __iter__(self):
465 return iter(self.changelog)
465 return iter(self.changelog)
466
466
467 def revs(self, expr, *args):
467 def revs(self, expr, *args):
468 '''Return a list of revisions matching the given revset'''
468 '''Return a list of revisions matching the given revset'''
469 expr = revset.formatspec(expr, *args)
469 expr = revset.formatspec(expr, *args)
470 m = revset.match(None, expr)
470 m = revset.match(None, expr)
471 return m(self, revset.spanset(self))
471 return m(self, revset.spanset(self))
472
472
473 def set(self, expr, *args):
473 def set(self, expr, *args):
474 '''
474 '''
475 Yield a context for each matching revision, after doing arg
475 Yield a context for each matching revision, after doing arg
476 replacement via revset.formatspec
476 replacement via revset.formatspec
477 '''
477 '''
478 for r in self.revs(expr, *args):
478 for r in self.revs(expr, *args):
479 yield self[r]
479 yield self[r]
480
480
481 def url(self):
481 def url(self):
482 return 'file:' + self.root
482 return 'file:' + self.root
483
483
484 def hook(self, name, throw=False, **args):
484 def hook(self, name, throw=False, **args):
485 """Call a hook, passing this repo instance.
485 """Call a hook, passing this repo instance.
486
486
487 This a convenience method to aid invoking hooks. Extensions likely
487 This a convenience method to aid invoking hooks. Extensions likely
488 won't call this unless they have registered a custom hook or are
488 won't call this unless they have registered a custom hook or are
489 replacing code that is expected to call a hook.
489 replacing code that is expected to call a hook.
490 """
490 """
491 return hook.hook(self.ui, self, name, throw, **args)
491 return hook.hook(self.ui, self, name, throw, **args)
492
492
493 @unfilteredmethod
493 @unfilteredmethod
494 def _tag(self, names, node, message, local, user, date, extra={},
494 def _tag(self, names, node, message, local, user, date, extra={},
495 editor=False):
495 editor=False):
496 if isinstance(names, str):
496 if isinstance(names, str):
497 names = (names,)
497 names = (names,)
498
498
499 branches = self.branchmap()
499 branches = self.branchmap()
500 for name in names:
500 for name in names:
501 self.hook('pretag', throw=True, node=hex(node), tag=name,
501 self.hook('pretag', throw=True, node=hex(node), tag=name,
502 local=local)
502 local=local)
503 if name in branches:
503 if name in branches:
504 self.ui.warn(_("warning: tag %s conflicts with existing"
504 self.ui.warn(_("warning: tag %s conflicts with existing"
505 " branch name\n") % name)
505 " branch name\n") % name)
506
506
507 def writetags(fp, names, munge, prevtags):
507 def writetags(fp, names, munge, prevtags):
508 fp.seek(0, 2)
508 fp.seek(0, 2)
509 if prevtags and prevtags[-1] != '\n':
509 if prevtags and prevtags[-1] != '\n':
510 fp.write('\n')
510 fp.write('\n')
511 for name in names:
511 for name in names:
512 m = munge and munge(name) or name
512 m = munge and munge(name) or name
513 if (self._tagscache.tagtypes and
513 if (self._tagscache.tagtypes and
514 name in self._tagscache.tagtypes):
514 name in self._tagscache.tagtypes):
515 old = self.tags().get(name, nullid)
515 old = self.tags().get(name, nullid)
516 fp.write('%s %s\n' % (hex(old), m))
516 fp.write('%s %s\n' % (hex(old), m))
517 fp.write('%s %s\n' % (hex(node), m))
517 fp.write('%s %s\n' % (hex(node), m))
518 fp.close()
518 fp.close()
519
519
520 prevtags = ''
520 prevtags = ''
521 if local:
521 if local:
522 try:
522 try:
523 fp = self.opener('localtags', 'r+')
523 fp = self.opener('localtags', 'r+')
524 except IOError:
524 except IOError:
525 fp = self.opener('localtags', 'a')
525 fp = self.opener('localtags', 'a')
526 else:
526 else:
527 prevtags = fp.read()
527 prevtags = fp.read()
528
528
529 # local tags are stored in the current charset
529 # local tags are stored in the current charset
530 writetags(fp, names, None, prevtags)
530 writetags(fp, names, None, prevtags)
531 for name in names:
531 for name in names:
532 self.hook('tag', node=hex(node), tag=name, local=local)
532 self.hook('tag', node=hex(node), tag=name, local=local)
533 return
533 return
534
534
535 try:
535 try:
536 fp = self.wfile('.hgtags', 'rb+')
536 fp = self.wfile('.hgtags', 'rb+')
537 except IOError, e:
537 except IOError, e:
538 if e.errno != errno.ENOENT:
538 if e.errno != errno.ENOENT:
539 raise
539 raise
540 fp = self.wfile('.hgtags', 'ab')
540 fp = self.wfile('.hgtags', 'ab')
541 else:
541 else:
542 prevtags = fp.read()
542 prevtags = fp.read()
543
543
544 # committed tags are stored in UTF-8
544 # committed tags are stored in UTF-8
545 writetags(fp, names, encoding.fromlocal, prevtags)
545 writetags(fp, names, encoding.fromlocal, prevtags)
546
546
547 fp.close()
547 fp.close()
548
548
549 self.invalidatecaches()
549 self.invalidatecaches()
550
550
551 if '.hgtags' not in self.dirstate:
551 if '.hgtags' not in self.dirstate:
552 self[None].add(['.hgtags'])
552 self[None].add(['.hgtags'])
553
553
554 m = matchmod.exact(self.root, '', ['.hgtags'])
554 m = matchmod.exact(self.root, '', ['.hgtags'])
555 tagnode = self.commit(message, user, date, extra=extra, match=m,
555 tagnode = self.commit(message, user, date, extra=extra, match=m,
556 editor=editor)
556 editor=editor)
557
557
558 for name in names:
558 for name in names:
559 self.hook('tag', node=hex(node), tag=name, local=local)
559 self.hook('tag', node=hex(node), tag=name, local=local)
560
560
561 return tagnode
561 return tagnode
562
562
563 def tag(self, names, node, message, local, user, date, editor=False):
563 def tag(self, names, node, message, local, user, date, editor=False):
564 '''tag a revision with one or more symbolic names.
564 '''tag a revision with one or more symbolic names.
565
565
566 names is a list of strings or, when adding a single tag, names may be a
566 names is a list of strings or, when adding a single tag, names may be a
567 string.
567 string.
568
568
569 if local is True, the tags are stored in a per-repository file.
569 if local is True, the tags are stored in a per-repository file.
570 otherwise, they are stored in the .hgtags file, and a new
570 otherwise, they are stored in the .hgtags file, and a new
571 changeset is committed with the change.
571 changeset is committed with the change.
572
572
573 keyword arguments:
573 keyword arguments:
574
574
575 local: whether to store tags in non-version-controlled file
575 local: whether to store tags in non-version-controlled file
576 (default False)
576 (default False)
577
577
578 message: commit message to use if committing
578 message: commit message to use if committing
579
579
580 user: name of user to use if committing
580 user: name of user to use if committing
581
581
582 date: date tuple to use if committing'''
582 date: date tuple to use if committing'''
583
583
584 if not local:
584 if not local:
585 m = matchmod.exact(self.root, '', ['.hgtags'])
585 m = matchmod.exact(self.root, '', ['.hgtags'])
586 if util.any(self.status(match=m, unknown=True, ignored=True)):
586 if util.any(self.status(match=m, unknown=True, ignored=True)):
587 raise util.Abort(_('working copy of .hgtags is changed'),
587 raise util.Abort(_('working copy of .hgtags is changed'),
588 hint=_('please commit .hgtags manually'))
588 hint=_('please commit .hgtags manually'))
589
589
590 self.tags() # instantiate the cache
590 self.tags() # instantiate the cache
591 self._tag(names, node, message, local, user, date, editor=editor)
591 self._tag(names, node, message, local, user, date, editor=editor)
592
592
593 @filteredpropertycache
593 @filteredpropertycache
594 def _tagscache(self):
594 def _tagscache(self):
595 '''Returns a tagscache object that contains various tags related
595 '''Returns a tagscache object that contains various tags related
596 caches.'''
596 caches.'''
597
597
598 # This simplifies its cache management by having one decorated
598 # This simplifies its cache management by having one decorated
599 # function (this one) and the rest simply fetch things from it.
599 # function (this one) and the rest simply fetch things from it.
600 class tagscache(object):
600 class tagscache(object):
601 def __init__(self):
601 def __init__(self):
602 # These two define the set of tags for this repository. tags
602 # These two define the set of tags for this repository. tags
603 # maps tag name to node; tagtypes maps tag name to 'global' or
603 # maps tag name to node; tagtypes maps tag name to 'global' or
604 # 'local'. (Global tags are defined by .hgtags across all
604 # 'local'. (Global tags are defined by .hgtags across all
605 # heads, and local tags are defined in .hg/localtags.)
605 # heads, and local tags are defined in .hg/localtags.)
606 # They constitute the in-memory cache of tags.
606 # They constitute the in-memory cache of tags.
607 self.tags = self.tagtypes = None
607 self.tags = self.tagtypes = None
608
608
609 self.nodetagscache = self.tagslist = None
609 self.nodetagscache = self.tagslist = None
610
610
611 cache = tagscache()
611 cache = tagscache()
612 cache.tags, cache.tagtypes = self._findtags()
612 cache.tags, cache.tagtypes = self._findtags()
613
613
614 return cache
614 return cache
615
615
616 def tags(self):
616 def tags(self):
617 '''return a mapping of tag to node'''
617 '''return a mapping of tag to node'''
618 t = {}
618 t = {}
619 if self.changelog.filteredrevs:
619 if self.changelog.filteredrevs:
620 tags, tt = self._findtags()
620 tags, tt = self._findtags()
621 else:
621 else:
622 tags = self._tagscache.tags
622 tags = self._tagscache.tags
623 for k, v in tags.iteritems():
623 for k, v in tags.iteritems():
624 try:
624 try:
625 # ignore tags to unknown nodes
625 # ignore tags to unknown nodes
626 self.changelog.rev(v)
626 self.changelog.rev(v)
627 t[k] = v
627 t[k] = v
628 except (error.LookupError, ValueError):
628 except (error.LookupError, ValueError):
629 pass
629 pass
630 return t
630 return t
631
631
632 def _findtags(self):
632 def _findtags(self):
633 '''Do the hard work of finding tags. Return a pair of dicts
633 '''Do the hard work of finding tags. Return a pair of dicts
634 (tags, tagtypes) where tags maps tag name to node, and tagtypes
634 (tags, tagtypes) where tags maps tag name to node, and tagtypes
635 maps tag name to a string like \'global\' or \'local\'.
635 maps tag name to a string like \'global\' or \'local\'.
636 Subclasses or extensions are free to add their own tags, but
636 Subclasses or extensions are free to add their own tags, but
637 should be aware that the returned dicts will be retained for the
637 should be aware that the returned dicts will be retained for the
638 duration of the localrepo object.'''
638 duration of the localrepo object.'''
639
639
640 # XXX what tagtype should subclasses/extensions use? Currently
640 # XXX what tagtype should subclasses/extensions use? Currently
641 # mq and bookmarks add tags, but do not set the tagtype at all.
641 # mq and bookmarks add tags, but do not set the tagtype at all.
642 # Should each extension invent its own tag type? Should there
642 # Should each extension invent its own tag type? Should there
643 # be one tagtype for all such "virtual" tags? Or is the status
643 # be one tagtype for all such "virtual" tags? Or is the status
644 # quo fine?
644 # quo fine?
645
645
646 alltags = {} # map tag name to (node, hist)
646 alltags = {} # map tag name to (node, hist)
647 tagtypes = {}
647 tagtypes = {}
648
648
649 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
649 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
650 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
650 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
651
651
652 # Build the return dicts. Have to re-encode tag names because
652 # Build the return dicts. Have to re-encode tag names because
653 # the tags module always uses UTF-8 (in order not to lose info
653 # the tags module always uses UTF-8 (in order not to lose info
654 # writing to the cache), but the rest of Mercurial wants them in
654 # writing to the cache), but the rest of Mercurial wants them in
655 # local encoding.
655 # local encoding.
656 tags = {}
656 tags = {}
657 for (name, (node, hist)) in alltags.iteritems():
657 for (name, (node, hist)) in alltags.iteritems():
658 if node != nullid:
658 if node != nullid:
659 tags[encoding.tolocal(name)] = node
659 tags[encoding.tolocal(name)] = node
660 tags['tip'] = self.changelog.tip()
660 tags['tip'] = self.changelog.tip()
661 tagtypes = dict([(encoding.tolocal(name), value)
661 tagtypes = dict([(encoding.tolocal(name), value)
662 for (name, value) in tagtypes.iteritems()])
662 for (name, value) in tagtypes.iteritems()])
663 return (tags, tagtypes)
663 return (tags, tagtypes)
664
664
665 def tagtype(self, tagname):
665 def tagtype(self, tagname):
666 '''
666 '''
667 return the type of the given tag. result can be:
667 return the type of the given tag. result can be:
668
668
669 'local' : a local tag
669 'local' : a local tag
670 'global' : a global tag
670 'global' : a global tag
671 None : tag does not exist
671 None : tag does not exist
672 '''
672 '''
673
673
674 return self._tagscache.tagtypes.get(tagname)
674 return self._tagscache.tagtypes.get(tagname)
675
675
676 def tagslist(self):
676 def tagslist(self):
677 '''return a list of tags ordered by revision'''
677 '''return a list of tags ordered by revision'''
678 if not self._tagscache.tagslist:
678 if not self._tagscache.tagslist:
679 l = []
679 l = []
680 for t, n in self.tags().iteritems():
680 for t, n in self.tags().iteritems():
681 l.append((self.changelog.rev(n), t, n))
681 l.append((self.changelog.rev(n), t, n))
682 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
682 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
683
683
684 return self._tagscache.tagslist
684 return self._tagscache.tagslist
685
685
686 def nodetags(self, node):
686 def nodetags(self, node):
687 '''return the tags associated with a node'''
687 '''return the tags associated with a node'''
688 if not self._tagscache.nodetagscache:
688 if not self._tagscache.nodetagscache:
689 nodetagscache = {}
689 nodetagscache = {}
690 for t, n in self._tagscache.tags.iteritems():
690 for t, n in self._tagscache.tags.iteritems():
691 nodetagscache.setdefault(n, []).append(t)
691 nodetagscache.setdefault(n, []).append(t)
692 for tags in nodetagscache.itervalues():
692 for tags in nodetagscache.itervalues():
693 tags.sort()
693 tags.sort()
694 self._tagscache.nodetagscache = nodetagscache
694 self._tagscache.nodetagscache = nodetagscache
695 return self._tagscache.nodetagscache.get(node, [])
695 return self._tagscache.nodetagscache.get(node, [])
696
696
697 def nodebookmarks(self, node):
697 def nodebookmarks(self, node):
698 marks = []
698 marks = []
699 for bookmark, n in self._bookmarks.iteritems():
699 for bookmark, n in self._bookmarks.iteritems():
700 if n == node:
700 if n == node:
701 marks.append(bookmark)
701 marks.append(bookmark)
702 return sorted(marks)
702 return sorted(marks)
703
703
704 def branchmap(self):
704 def branchmap(self):
705 '''returns a dictionary {branch: [branchheads]} with branchheads
705 '''returns a dictionary {branch: [branchheads]} with branchheads
706 ordered by increasing revision number'''
706 ordered by increasing revision number'''
707 branchmap.updatecache(self)
707 branchmap.updatecache(self)
708 return self._branchcaches[self.filtername]
708 return self._branchcaches[self.filtername]
709
709
710 def branchtip(self, branch):
710 def branchtip(self, branch):
711 '''return the tip node for a given branch'''
711 '''return the tip node for a given branch'''
712 try:
712 try:
713 return self.branchmap().branchtip(branch)
713 return self.branchmap().branchtip(branch)
714 except KeyError:
714 except KeyError:
715 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
715 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
716
716
717 def lookup(self, key):
717 def lookup(self, key):
718 return self[key].node()
718 return self[key].node()
719
719
720 def lookupbranch(self, key, remote=None):
720 def lookupbranch(self, key, remote=None):
721 repo = remote or self
721 repo = remote or self
722 if key in repo.branchmap():
722 if key in repo.branchmap():
723 return key
723 return key
724
724
725 repo = (remote and remote.local()) and remote or self
725 repo = (remote and remote.local()) and remote or self
726 return repo[key].branch()
726 return repo[key].branch()
727
727
728 def known(self, nodes):
728 def known(self, nodes):
729 nm = self.changelog.nodemap
729 nm = self.changelog.nodemap
730 pc = self._phasecache
730 pc = self._phasecache
731 result = []
731 result = []
732 for n in nodes:
732 for n in nodes:
733 r = nm.get(n)
733 r = nm.get(n)
734 resp = not (r is None or pc.phase(self, r) >= phases.secret)
734 resp = not (r is None or pc.phase(self, r) >= phases.secret)
735 result.append(resp)
735 result.append(resp)
736 return result
736 return result
737
737
738 def local(self):
738 def local(self):
739 return self
739 return self
740
740
741 def cancopy(self):
741 def cancopy(self):
742 # so statichttprepo's override of local() works
742 # so statichttprepo's override of local() works
743 if not self.local():
743 if not self.local():
744 return False
744 return False
745 if not self.ui.configbool('phases', 'publish', True):
745 if not self.ui.configbool('phases', 'publish', True):
746 return True
746 return True
747 # if publishing we can't copy if there is filtered content
747 # if publishing we can't copy if there is filtered content
748 return not self.filtered('visible').changelog.filteredrevs
748 return not self.filtered('visible').changelog.filteredrevs
749
749
750 def join(self, f, *insidef):
750 def join(self, f, *insidef):
751 return os.path.join(self.path, f, *insidef)
751 return os.path.join(self.path, f, *insidef)
752
752
753 def wjoin(self, f, *insidef):
753 def wjoin(self, f, *insidef):
754 return os.path.join(self.root, f, *insidef)
754 return os.path.join(self.root, f, *insidef)
755
755
756 def file(self, f):
756 def file(self, f):
757 if f[0] == '/':
757 if f[0] == '/':
758 f = f[1:]
758 f = f[1:]
759 return filelog.filelog(self.sopener, f)
759 return filelog.filelog(self.sopener, f)
760
760
761 def changectx(self, changeid):
761 def changectx(self, changeid):
762 return self[changeid]
762 return self[changeid]
763
763
764 def parents(self, changeid=None):
764 def parents(self, changeid=None):
765 '''get list of changectxs for parents of changeid'''
765 '''get list of changectxs for parents of changeid'''
766 return self[changeid].parents()
766 return self[changeid].parents()
767
767
768 def setparents(self, p1, p2=nullid):
768 def setparents(self, p1, p2=nullid):
769 self.dirstate.beginparentchange()
769 self.dirstate.beginparentchange()
770 copies = self.dirstate.setparents(p1, p2)
770 copies = self.dirstate.setparents(p1, p2)
771 pctx = self[p1]
771 pctx = self[p1]
772 if copies:
772 if copies:
773 # Adjust copy records, the dirstate cannot do it, it
773 # Adjust copy records, the dirstate cannot do it, it
774 # requires access to parents manifests. Preserve them
774 # requires access to parents manifests. Preserve them
775 # only for entries added to first parent.
775 # only for entries added to first parent.
776 for f in copies:
776 for f in copies:
777 if f not in pctx and copies[f] in pctx:
777 if f not in pctx and copies[f] in pctx:
778 self.dirstate.copy(copies[f], f)
778 self.dirstate.copy(copies[f], f)
779 if p2 == nullid:
779 if p2 == nullid:
780 for f, s in sorted(self.dirstate.copies().items()):
780 for f, s in sorted(self.dirstate.copies().items()):
781 if f not in pctx and s not in pctx:
781 if f not in pctx and s not in pctx:
782 self.dirstate.copy(None, f)
782 self.dirstate.copy(None, f)
783 self.dirstate.endparentchange()
783 self.dirstate.endparentchange()
784
784
785 def filectx(self, path, changeid=None, fileid=None):
785 def filectx(self, path, changeid=None, fileid=None):
786 """changeid can be a changeset revision, node, or tag.
786 """changeid can be a changeset revision, node, or tag.
787 fileid can be a file revision or node."""
787 fileid can be a file revision or node."""
788 return context.filectx(self, path, changeid, fileid)
788 return context.filectx(self, path, changeid, fileid)
789
789
790 def getcwd(self):
790 def getcwd(self):
791 return self.dirstate.getcwd()
791 return self.dirstate.getcwd()
792
792
793 def pathto(self, f, cwd=None):
793 def pathto(self, f, cwd=None):
794 return self.dirstate.pathto(f, cwd)
794 return self.dirstate.pathto(f, cwd)
795
795
796 def wfile(self, f, mode='r'):
796 def wfile(self, f, mode='r'):
797 return self.wopener(f, mode)
797 return self.wopener(f, mode)
798
798
799 def _link(self, f):
799 def _link(self, f):
800 return self.wvfs.islink(f)
800 return self.wvfs.islink(f)
801
801
802 def _loadfilter(self, filter):
802 def _loadfilter(self, filter):
803 if filter not in self.filterpats:
803 if filter not in self.filterpats:
804 l = []
804 l = []
805 for pat, cmd in self.ui.configitems(filter):
805 for pat, cmd in self.ui.configitems(filter):
806 if cmd == '!':
806 if cmd == '!':
807 continue
807 continue
808 mf = matchmod.match(self.root, '', [pat])
808 mf = matchmod.match(self.root, '', [pat])
809 fn = None
809 fn = None
810 params = cmd
810 params = cmd
811 for name, filterfn in self._datafilters.iteritems():
811 for name, filterfn in self._datafilters.iteritems():
812 if cmd.startswith(name):
812 if cmd.startswith(name):
813 fn = filterfn
813 fn = filterfn
814 params = cmd[len(name):].lstrip()
814 params = cmd[len(name):].lstrip()
815 break
815 break
816 if not fn:
816 if not fn:
817 fn = lambda s, c, **kwargs: util.filter(s, c)
817 fn = lambda s, c, **kwargs: util.filter(s, c)
818 # Wrap old filters not supporting keyword arguments
818 # Wrap old filters not supporting keyword arguments
819 if not inspect.getargspec(fn)[2]:
819 if not inspect.getargspec(fn)[2]:
820 oldfn = fn
820 oldfn = fn
821 fn = lambda s, c, **kwargs: oldfn(s, c)
821 fn = lambda s, c, **kwargs: oldfn(s, c)
822 l.append((mf, fn, params))
822 l.append((mf, fn, params))
823 self.filterpats[filter] = l
823 self.filterpats[filter] = l
824 return self.filterpats[filter]
824 return self.filterpats[filter]
825
825
826 def _filter(self, filterpats, filename, data):
826 def _filter(self, filterpats, filename, data):
827 for mf, fn, cmd in filterpats:
827 for mf, fn, cmd in filterpats:
828 if mf(filename):
828 if mf(filename):
829 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
829 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
830 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
830 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
831 break
831 break
832
832
833 return data
833 return data
834
834
835 @unfilteredpropertycache
835 @unfilteredpropertycache
836 def _encodefilterpats(self):
836 def _encodefilterpats(self):
837 return self._loadfilter('encode')
837 return self._loadfilter('encode')
838
838
839 @unfilteredpropertycache
839 @unfilteredpropertycache
840 def _decodefilterpats(self):
840 def _decodefilterpats(self):
841 return self._loadfilter('decode')
841 return self._loadfilter('decode')
842
842
843 def adddatafilter(self, name, filter):
843 def adddatafilter(self, name, filter):
844 self._datafilters[name] = filter
844 self._datafilters[name] = filter
845
845
846 def wread(self, filename):
846 def wread(self, filename):
847 if self._link(filename):
847 if self._link(filename):
848 data = self.wvfs.readlink(filename)
848 data = self.wvfs.readlink(filename)
849 else:
849 else:
850 data = self.wopener.read(filename)
850 data = self.wopener.read(filename)
851 return self._filter(self._encodefilterpats, filename, data)
851 return self._filter(self._encodefilterpats, filename, data)
852
852
853 def wwrite(self, filename, data, flags):
853 def wwrite(self, filename, data, flags):
854 data = self._filter(self._decodefilterpats, filename, data)
854 data = self._filter(self._decodefilterpats, filename, data)
855 if 'l' in flags:
855 if 'l' in flags:
856 self.wopener.symlink(data, filename)
856 self.wopener.symlink(data, filename)
857 else:
857 else:
858 self.wopener.write(filename, data)
858 self.wopener.write(filename, data)
859 if 'x' in flags:
859 if 'x' in flags:
860 self.wvfs.setflags(filename, False, True)
860 self.wvfs.setflags(filename, False, True)
861
861
862 def wwritedata(self, filename, data):
862 def wwritedata(self, filename, data):
863 return self._filter(self._decodefilterpats, filename, data)
863 return self._filter(self._decodefilterpats, filename, data)
864
864
865 def transaction(self, desc, report=None):
865 def transaction(self, desc, report=None):
866 tr = self._transref and self._transref() or None
866 tr = self._transref and self._transref() or None
867 if tr and tr.running():
867 if tr and tr.running():
868 return tr.nest()
868 return tr.nest()
869
869
870 # abort here if the journal already exists
870 # abort here if the journal already exists
871 if self.svfs.exists("journal"):
871 if self.svfs.exists("journal"):
872 raise error.RepoError(
872 raise error.RepoError(
873 _("abandoned transaction found"),
873 _("abandoned transaction found"),
874 hint=_("run 'hg recover' to clean up transaction"))
874 hint=_("run 'hg recover' to clean up transaction"))
875
875
876 def onclose():
876 def onclose():
877 self.store.write(self._transref())
877 self.store.write(self._transref())
878
878
879 self._writejournal(desc)
879 self._writejournal(desc)
880 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
880 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
881 rp = report and report or self.ui.warn
881 rp = report and report or self.ui.warn
882 tr = transaction.transaction(rp, self.sopener,
882 tr = transaction.transaction(rp, self.sopener,
883 "journal",
883 "journal",
884 aftertrans(renames),
884 aftertrans(renames),
885 self.store.createmode,
885 self.store.createmode,
886 onclose)
886 onclose)
887 self._transref = weakref.ref(tr)
887 self._transref = weakref.ref(tr)
888 return tr
888 return tr
889
889
890 def _journalfiles(self):
890 def _journalfiles(self):
891 return ((self.svfs, 'journal'),
891 return ((self.svfs, 'journal'),
892 (self.vfs, 'journal.dirstate'),
892 (self.vfs, 'journal.dirstate'),
893 (self.vfs, 'journal.branch'),
893 (self.vfs, 'journal.branch'),
894 (self.vfs, 'journal.desc'),
894 (self.vfs, 'journal.desc'),
895 (self.vfs, 'journal.bookmarks'),
895 (self.vfs, 'journal.bookmarks'),
896 (self.svfs, 'journal.phaseroots'))
896 (self.svfs, 'journal.phaseroots'))
897
897
898 def undofiles(self):
898 def undofiles(self):
899 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
899 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
900
900
901 def _writejournal(self, desc):
901 def _writejournal(self, desc):
902 self.opener.write("journal.dirstate",
902 self.opener.write("journal.dirstate",
903 self.opener.tryread("dirstate"))
903 self.opener.tryread("dirstate"))
904 self.opener.write("journal.branch",
904 self.opener.write("journal.branch",
905 encoding.fromlocal(self.dirstate.branch()))
905 encoding.fromlocal(self.dirstate.branch()))
906 self.opener.write("journal.desc",
906 self.opener.write("journal.desc",
907 "%d\n%s\n" % (len(self), desc))
907 "%d\n%s\n" % (len(self), desc))
908 self.opener.write("journal.bookmarks",
908 self.opener.write("journal.bookmarks",
909 self.opener.tryread("bookmarks"))
909 self.opener.tryread("bookmarks"))
910 self.sopener.write("journal.phaseroots",
910 self.sopener.write("journal.phaseroots",
911 self.sopener.tryread("phaseroots"))
911 self.sopener.tryread("phaseroots"))
912
912
913 def recover(self):
913 def recover(self):
914 lock = self.lock()
914 lock = self.lock()
915 try:
915 try:
916 if self.svfs.exists("journal"):
916 if self.svfs.exists("journal"):
917 self.ui.status(_("rolling back interrupted transaction\n"))
917 self.ui.status(_("rolling back interrupted transaction\n"))
918 transaction.rollback(self.sopener, "journal",
918 transaction.rollback(self.sopener, "journal",
919 self.ui.warn)
919 self.ui.warn)
920 self.invalidate()
920 self.invalidate()
921 return True
921 return True
922 else:
922 else:
923 self.ui.warn(_("no interrupted transaction available\n"))
923 self.ui.warn(_("no interrupted transaction available\n"))
924 return False
924 return False
925 finally:
925 finally:
926 lock.release()
926 lock.release()
927
927
928 def rollback(self, dryrun=False, force=False):
928 def rollback(self, dryrun=False, force=False):
929 wlock = lock = None
929 wlock = lock = None
930 try:
930 try:
931 wlock = self.wlock()
931 wlock = self.wlock()
932 lock = self.lock()
932 lock = self.lock()
933 if self.svfs.exists("undo"):
933 if self.svfs.exists("undo"):
934 return self._rollback(dryrun, force)
934 return self._rollback(dryrun, force)
935 else:
935 else:
936 self.ui.warn(_("no rollback information available\n"))
936 self.ui.warn(_("no rollback information available\n"))
937 return 1
937 return 1
938 finally:
938 finally:
939 release(lock, wlock)
939 release(lock, wlock)
940
940
941 @unfilteredmethod # Until we get smarter cache management
941 @unfilteredmethod # Until we get smarter cache management
942 def _rollback(self, dryrun, force):
942 def _rollback(self, dryrun, force):
943 ui = self.ui
943 ui = self.ui
944 try:
944 try:
945 args = self.opener.read('undo.desc').splitlines()
945 args = self.opener.read('undo.desc').splitlines()
946 (oldlen, desc, detail) = (int(args[0]), args[1], None)
946 (oldlen, desc, detail) = (int(args[0]), args[1], None)
947 if len(args) >= 3:
947 if len(args) >= 3:
948 detail = args[2]
948 detail = args[2]
949 oldtip = oldlen - 1
949 oldtip = oldlen - 1
950
950
951 if detail and ui.verbose:
951 if detail and ui.verbose:
952 msg = (_('repository tip rolled back to revision %s'
952 msg = (_('repository tip rolled back to revision %s'
953 ' (undo %s: %s)\n')
953 ' (undo %s: %s)\n')
954 % (oldtip, desc, detail))
954 % (oldtip, desc, detail))
955 else:
955 else:
956 msg = (_('repository tip rolled back to revision %s'
956 msg = (_('repository tip rolled back to revision %s'
957 ' (undo %s)\n')
957 ' (undo %s)\n')
958 % (oldtip, desc))
958 % (oldtip, desc))
959 except IOError:
959 except IOError:
960 msg = _('rolling back unknown transaction\n')
960 msg = _('rolling back unknown transaction\n')
961 desc = None
961 desc = None
962
962
963 if not force and self['.'] != self['tip'] and desc == 'commit':
963 if not force and self['.'] != self['tip'] and desc == 'commit':
964 raise util.Abort(
964 raise util.Abort(
965 _('rollback of last commit while not checked out '
965 _('rollback of last commit while not checked out '
966 'may lose data'), hint=_('use -f to force'))
966 'may lose data'), hint=_('use -f to force'))
967
967
968 ui.status(msg)
968 ui.status(msg)
969 if dryrun:
969 if dryrun:
970 return 0
970 return 0
971
971
972 parents = self.dirstate.parents()
972 parents = self.dirstate.parents()
973 self.destroying()
973 self.destroying()
974 transaction.rollback(self.sopener, 'undo', ui.warn)
974 transaction.rollback(self.sopener, 'undo', ui.warn)
975 if self.vfs.exists('undo.bookmarks'):
975 if self.vfs.exists('undo.bookmarks'):
976 self.vfs.rename('undo.bookmarks', 'bookmarks')
976 self.vfs.rename('undo.bookmarks', 'bookmarks')
977 if self.svfs.exists('undo.phaseroots'):
977 if self.svfs.exists('undo.phaseroots'):
978 self.svfs.rename('undo.phaseroots', 'phaseroots')
978 self.svfs.rename('undo.phaseroots', 'phaseroots')
979 self.invalidate()
979 self.invalidate()
980
980
981 parentgone = (parents[0] not in self.changelog.nodemap or
981 parentgone = (parents[0] not in self.changelog.nodemap or
982 parents[1] not in self.changelog.nodemap)
982 parents[1] not in self.changelog.nodemap)
983 if parentgone:
983 if parentgone:
984 self.vfs.rename('undo.dirstate', 'dirstate')
984 self.vfs.rename('undo.dirstate', 'dirstate')
985 try:
985 try:
986 branch = self.opener.read('undo.branch')
986 branch = self.opener.read('undo.branch')
987 self.dirstate.setbranch(encoding.tolocal(branch))
987 self.dirstate.setbranch(encoding.tolocal(branch))
988 except IOError:
988 except IOError:
989 ui.warn(_('named branch could not be reset: '
989 ui.warn(_('named branch could not be reset: '
990 'current branch is still \'%s\'\n')
990 'current branch is still \'%s\'\n')
991 % self.dirstate.branch())
991 % self.dirstate.branch())
992
992
993 self.dirstate.invalidate()
993 self.dirstate.invalidate()
994 parents = tuple([p.rev() for p in self.parents()])
994 parents = tuple([p.rev() for p in self.parents()])
995 if len(parents) > 1:
995 if len(parents) > 1:
996 ui.status(_('working directory now based on '
996 ui.status(_('working directory now based on '
997 'revisions %d and %d\n') % parents)
997 'revisions %d and %d\n') % parents)
998 else:
998 else:
999 ui.status(_('working directory now based on '
999 ui.status(_('working directory now based on '
1000 'revision %d\n') % parents)
1000 'revision %d\n') % parents)
1001 # TODO: if we know which new heads may result from this rollback, pass
1001 # TODO: if we know which new heads may result from this rollback, pass
1002 # them to destroy(), which will prevent the branchhead cache from being
1002 # them to destroy(), which will prevent the branchhead cache from being
1003 # invalidated.
1003 # invalidated.
1004 self.destroyed()
1004 self.destroyed()
1005 return 0
1005 return 0
1006
1006
1007 def invalidatecaches(self):
1007 def invalidatecaches(self):
1008
1008
1009 if '_tagscache' in vars(self):
1009 if '_tagscache' in vars(self):
1010 # can't use delattr on proxy
1010 # can't use delattr on proxy
1011 del self.__dict__['_tagscache']
1011 del self.__dict__['_tagscache']
1012
1012
1013 self.unfiltered()._branchcaches.clear()
1013 self.unfiltered()._branchcaches.clear()
1014 self.invalidatevolatilesets()
1014 self.invalidatevolatilesets()
1015
1015
1016 def invalidatevolatilesets(self):
1016 def invalidatevolatilesets(self):
1017 self.filteredrevcache.clear()
1017 self.filteredrevcache.clear()
1018 obsolete.clearobscaches(self)
1018 obsolete.clearobscaches(self)
1019
1019
1020 def invalidatedirstate(self):
1020 def invalidatedirstate(self):
1021 '''Invalidates the dirstate, causing the next call to dirstate
1021 '''Invalidates the dirstate, causing the next call to dirstate
1022 to check if it was modified since the last time it was read,
1022 to check if it was modified since the last time it was read,
1023 rereading it if it has.
1023 rereading it if it has.
1024
1024
1025 This is different to dirstate.invalidate() that it doesn't always
1025 This is different to dirstate.invalidate() that it doesn't always
1026 rereads the dirstate. Use dirstate.invalidate() if you want to
1026 rereads the dirstate. Use dirstate.invalidate() if you want to
1027 explicitly read the dirstate again (i.e. restoring it to a previous
1027 explicitly read the dirstate again (i.e. restoring it to a previous
1028 known good state).'''
1028 known good state).'''
1029 if hasunfilteredcache(self, 'dirstate'):
1029 if hasunfilteredcache(self, 'dirstate'):
1030 for k in self.dirstate._filecache:
1030 for k in self.dirstate._filecache:
1031 try:
1031 try:
1032 delattr(self.dirstate, k)
1032 delattr(self.dirstate, k)
1033 except AttributeError:
1033 except AttributeError:
1034 pass
1034 pass
1035 delattr(self.unfiltered(), 'dirstate')
1035 delattr(self.unfiltered(), 'dirstate')
1036
1036
1037 def invalidate(self):
1037 def invalidate(self):
1038 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1038 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1039 for k in self._filecache:
1039 for k in self._filecache:
1040 # dirstate is invalidated separately in invalidatedirstate()
1040 # dirstate is invalidated separately in invalidatedirstate()
1041 if k == 'dirstate':
1041 if k == 'dirstate':
1042 continue
1042 continue
1043
1043
1044 try:
1044 try:
1045 delattr(unfiltered, k)
1045 delattr(unfiltered, k)
1046 except AttributeError:
1046 except AttributeError:
1047 pass
1047 pass
1048 self.invalidatecaches()
1048 self.invalidatecaches()
1049 self.store.invalidatecaches()
1049 self.store.invalidatecaches()
1050
1050
1051 def invalidateall(self):
1051 def invalidateall(self):
1052 '''Fully invalidates both store and non-store parts, causing the
1052 '''Fully invalidates both store and non-store parts, causing the
1053 subsequent operation to reread any outside changes.'''
1053 subsequent operation to reread any outside changes.'''
1054 # extension should hook this to invalidate its caches
1054 # extension should hook this to invalidate its caches
1055 self.invalidate()
1055 self.invalidate()
1056 self.invalidatedirstate()
1056 self.invalidatedirstate()
1057
1057
1058 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1058 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1059 try:
1059 try:
1060 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1060 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1061 except error.LockHeld, inst:
1061 except error.LockHeld, inst:
1062 if not wait:
1062 if not wait:
1063 raise
1063 raise
1064 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1064 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1065 (desc, inst.locker))
1065 (desc, inst.locker))
1066 # default to 600 seconds timeout
1066 # default to 600 seconds timeout
1067 l = lockmod.lock(vfs, lockname,
1067 l = lockmod.lock(vfs, lockname,
1068 int(self.ui.config("ui", "timeout", "600")),
1068 int(self.ui.config("ui", "timeout", "600")),
1069 releasefn, desc=desc)
1069 releasefn, desc=desc)
1070 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1070 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1071 if acquirefn:
1071 if acquirefn:
1072 acquirefn()
1072 acquirefn()
1073 return l
1073 return l
1074
1074
1075 def _afterlock(self, callback):
1075 def _afterlock(self, callback):
1076 """add a callback to the current repository lock.
1076 """add a callback to the current repository lock.
1077
1077
1078 The callback will be executed on lock release."""
1078 The callback will be executed on lock release."""
1079 l = self._lockref and self._lockref()
1079 l = self._lockref and self._lockref()
1080 if l:
1080 if l:
1081 l.postrelease.append(callback)
1081 l.postrelease.append(callback)
1082 else:
1082 else:
1083 callback()
1083 callback()
1084
1084
1085 def lock(self, wait=True):
1085 def lock(self, wait=True):
1086 '''Lock the repository store (.hg/store) and return a weak reference
1086 '''Lock the repository store (.hg/store) and return a weak reference
1087 to the lock. Use this before modifying the store (e.g. committing or
1087 to the lock. Use this before modifying the store (e.g. committing or
1088 stripping). If you are opening a transaction, get a lock as well.)'''
1088 stripping). If you are opening a transaction, get a lock as well.)'''
1089 l = self._lockref and self._lockref()
1089 l = self._lockref and self._lockref()
1090 if l is not None and l.held:
1090 if l is not None and l.held:
1091 l.lock()
1091 l.lock()
1092 return l
1092 return l
1093
1093
1094 def unlock():
1094 def unlock():
1095 for k, ce in self._filecache.items():
1095 for k, ce in self._filecache.items():
1096 if k == 'dirstate' or k not in self.__dict__:
1096 if k == 'dirstate' or k not in self.__dict__:
1097 continue
1097 continue
1098 ce.refresh()
1098 ce.refresh()
1099
1099
1100 l = self._lock(self.svfs, "lock", wait, unlock,
1100 l = self._lock(self.svfs, "lock", wait, unlock,
1101 self.invalidate, _('repository %s') % self.origroot)
1101 self.invalidate, _('repository %s') % self.origroot)
1102 self._lockref = weakref.ref(l)
1102 self._lockref = weakref.ref(l)
1103 return l
1103 return l
1104
1104
1105 def wlock(self, wait=True):
1105 def wlock(self, wait=True):
1106 '''Lock the non-store parts of the repository (everything under
1106 '''Lock the non-store parts of the repository (everything under
1107 .hg except .hg/store) and return a weak reference to the lock.
1107 .hg except .hg/store) and return a weak reference to the lock.
1108 Use this before modifying files in .hg.'''
1108 Use this before modifying files in .hg.'''
1109 l = self._wlockref and self._wlockref()
1109 l = self._wlockref and self._wlockref()
1110 if l is not None and l.held:
1110 if l is not None and l.held:
1111 l.lock()
1111 l.lock()
1112 return l
1112 return l
1113
1113
1114 def unlock():
1114 def unlock():
1115 if self.dirstate.pendingparentchange():
1115 if self.dirstate.pendingparentchange():
1116 self.dirstate.invalidate()
1116 self.dirstate.invalidate()
1117 else:
1117 else:
1118 self.dirstate.write()
1118 self.dirstate.write()
1119
1119
1120 self._filecache['dirstate'].refresh()
1120 self._filecache['dirstate'].refresh()
1121
1121
1122 l = self._lock(self.vfs, "wlock", wait, unlock,
1122 l = self._lock(self.vfs, "wlock", wait, unlock,
1123 self.invalidatedirstate, _('working directory of %s') %
1123 self.invalidatedirstate, _('working directory of %s') %
1124 self.origroot)
1124 self.origroot)
1125 self._wlockref = weakref.ref(l)
1125 self._wlockref = weakref.ref(l)
1126 return l
1126 return l
1127
1127
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        fctx supplies the file's path, data and rename information.
        manifest1/manifest2 map file names to file nodes in the commit's
        first/second parent. linkrev is the changelog revision the new
        filelog entry will be linked to. tr is the (proxied) running
        transaction. Names of files whose content, copy metadata or flags
        changed are appended to changelist.

        Returns the node of the newly-added filelog revision, or the
        first-parent file node when nothing new needs to be stored
        (unchanged content, possibly with a flags-only change).
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        # a parent of nullid means "file absent in that parent"
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                 should record that bar descends from
            #                 bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

                # find source in nearest ancestor if we've lost track
                if not crev:
                    self.ui.debug(" %s: searching for copy revision for %s\n" %
                                  (fname, cfname))
                    for ancestor in self[None].ancestors():
                        if cfname in ancestor:
                            crev = ancestor[cfname].filenode()
                            break

            if crev:
                # record the copy source in the filelog metadata; the
                # first parent becomes nullid ("look up the copy data")
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent1 == nullid:
            # file absent in the first parent: promote the second parent
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
            if fparent1 in fparentancestors:
                fparent1, fparent2 = fparent2, nullid
            elif fparent2 in fparentancestors:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
        # are just the flags changed during merge?
        elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1
1208
1208
1209 @unfilteredmethod
1209 @unfilteredmethod
1210 def commit(self, text="", user=None, date=None, match=None, force=False,
1210 def commit(self, text="", user=None, date=None, match=None, force=False,
1211 editor=False, extra={}):
1211 editor=False, extra={}):
1212 """Add a new revision to current repository.
1212 """Add a new revision to current repository.
1213
1213
1214 Revision information is gathered from the working directory,
1214 Revision information is gathered from the working directory,
1215 match can be used to filter the committed files. If editor is
1215 match can be used to filter the committed files. If editor is
1216 supplied, it is called to get a commit message.
1216 supplied, it is called to get a commit message.
1217 """
1217 """
1218
1218
1219 def fail(f, msg):
1219 def fail(f, msg):
1220 raise util.Abort('%s: %s' % (f, msg))
1220 raise util.Abort('%s: %s' % (f, msg))
1221
1221
1222 if not match:
1222 if not match:
1223 match = matchmod.always(self.root, '')
1223 match = matchmod.always(self.root, '')
1224
1224
1225 if not force:
1225 if not force:
1226 vdirs = []
1226 vdirs = []
1227 match.explicitdir = vdirs.append
1227 match.explicitdir = vdirs.append
1228 match.bad = fail
1228 match.bad = fail
1229
1229
1230 wlock = self.wlock()
1230 wlock = self.wlock()
1231 try:
1231 try:
1232 wctx = self[None]
1232 wctx = self[None]
1233 merge = len(wctx.parents()) > 1
1233 merge = len(wctx.parents()) > 1
1234
1234
1235 if (not force and merge and match and
1235 if (not force and merge and match and
1236 (match.files() or match.anypats())):
1236 (match.files() or match.anypats())):
1237 raise util.Abort(_('cannot partially commit a merge '
1237 raise util.Abort(_('cannot partially commit a merge '
1238 '(do not specify files or patterns)'))
1238 '(do not specify files or patterns)'))
1239
1239
1240 status = self.status(match=match, clean=force)
1240 status = self.status(match=match, clean=force)
1241 if force:
1241 if force:
1242 status.modified.extend(status.clean) # mq may commit clean files
1242 status.modified.extend(status.clean) # mq may commit clean files
1243
1243
1244 # check subrepos
1244 # check subrepos
1245 subs = []
1245 subs = []
1246 commitsubs = set()
1246 commitsubs = set()
1247 newstate = wctx.substate.copy()
1247 newstate = wctx.substate.copy()
1248 # only manage subrepos and .hgsubstate if .hgsub is present
1248 # only manage subrepos and .hgsubstate if .hgsub is present
1249 if '.hgsub' in wctx:
1249 if '.hgsub' in wctx:
1250 # we'll decide whether to track this ourselves, thanks
1250 # we'll decide whether to track this ourselves, thanks
1251 for c in status.modified, status.added, status.removed:
1251 for c in status.modified, status.added, status.removed:
1252 if '.hgsubstate' in c:
1252 if '.hgsubstate' in c:
1253 c.remove('.hgsubstate')
1253 c.remove('.hgsubstate')
1254
1254
1255 # compare current state to last committed state
1255 # compare current state to last committed state
1256 # build new substate based on last committed state
1256 # build new substate based on last committed state
1257 oldstate = wctx.p1().substate
1257 oldstate = wctx.p1().substate
1258 for s in sorted(newstate.keys()):
1258 for s in sorted(newstate.keys()):
1259 if not match(s):
1259 if not match(s):
1260 # ignore working copy, use old state if present
1260 # ignore working copy, use old state if present
1261 if s in oldstate:
1261 if s in oldstate:
1262 newstate[s] = oldstate[s]
1262 newstate[s] = oldstate[s]
1263 continue
1263 continue
1264 if not force:
1264 if not force:
1265 raise util.Abort(
1265 raise util.Abort(
1266 _("commit with new subrepo %s excluded") % s)
1266 _("commit with new subrepo %s excluded") % s)
1267 if wctx.sub(s).dirty(True):
1267 if wctx.sub(s).dirty(True):
1268 if not self.ui.configbool('ui', 'commitsubrepos'):
1268 if not self.ui.configbool('ui', 'commitsubrepos'):
1269 raise util.Abort(
1269 raise util.Abort(
1270 _("uncommitted changes in subrepo %s") % s,
1270 _("uncommitted changes in subrepo %s") % s,
1271 hint=_("use --subrepos for recursive commit"))
1271 hint=_("use --subrepos for recursive commit"))
1272 subs.append(s)
1272 subs.append(s)
1273 commitsubs.add(s)
1273 commitsubs.add(s)
1274 else:
1274 else:
1275 bs = wctx.sub(s).basestate()
1275 bs = wctx.sub(s).basestate()
1276 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1276 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1277 if oldstate.get(s, (None, None, None))[1] != bs:
1277 if oldstate.get(s, (None, None, None))[1] != bs:
1278 subs.append(s)
1278 subs.append(s)
1279
1279
1280 # check for removed subrepos
1280 # check for removed subrepos
1281 for p in wctx.parents():
1281 for p in wctx.parents():
1282 r = [s for s in p.substate if s not in newstate]
1282 r = [s for s in p.substate if s not in newstate]
1283 subs += [s for s in r if match(s)]
1283 subs += [s for s in r if match(s)]
1284 if subs:
1284 if subs:
1285 if (not match('.hgsub') and
1285 if (not match('.hgsub') and
1286 '.hgsub' in (wctx.modified() + wctx.added())):
1286 '.hgsub' in (wctx.modified() + wctx.added())):
1287 raise util.Abort(
1287 raise util.Abort(
1288 _("can't commit subrepos without .hgsub"))
1288 _("can't commit subrepos without .hgsub"))
1289 status.modified.insert(0, '.hgsubstate')
1289 status.modified.insert(0, '.hgsubstate')
1290
1290
1291 elif '.hgsub' in status.removed:
1291 elif '.hgsub' in status.removed:
1292 # clean up .hgsubstate when .hgsub is removed
1292 # clean up .hgsubstate when .hgsub is removed
1293 if ('.hgsubstate' in wctx and
1293 if ('.hgsubstate' in wctx and
1294 '.hgsubstate' not in (status.modified + status.added +
1294 '.hgsubstate' not in (status.modified + status.added +
1295 status.removed)):
1295 status.removed)):
1296 status.removed.insert(0, '.hgsubstate')
1296 status.removed.insert(0, '.hgsubstate')
1297
1297
1298 # make sure all explicit patterns are matched
1298 # make sure all explicit patterns are matched
1299 if not force and match.files():
1299 if not force and match.files():
1300 matched = set(status.modified + status.added + status.removed)
1300 matched = set(status.modified + status.added + status.removed)
1301
1301
1302 for f in match.files():
1302 for f in match.files():
1303 f = self.dirstate.normalize(f)
1303 f = self.dirstate.normalize(f)
1304 if f == '.' or f in matched or f in wctx.substate:
1304 if f == '.' or f in matched or f in wctx.substate:
1305 continue
1305 continue
1306 if f in status.deleted:
1306 if f in status.deleted:
1307 fail(f, _('file not found!'))
1307 fail(f, _('file not found!'))
1308 if f in vdirs: # visited directory
1308 if f in vdirs: # visited directory
1309 d = f + '/'
1309 d = f + '/'
1310 for mf in matched:
1310 for mf in matched:
1311 if mf.startswith(d):
1311 if mf.startswith(d):
1312 break
1312 break
1313 else:
1313 else:
1314 fail(f, _("no match under directory!"))
1314 fail(f, _("no match under directory!"))
1315 elif f not in self.dirstate:
1315 elif f not in self.dirstate:
1316 fail(f, _("file not tracked!"))
1316 fail(f, _("file not tracked!"))
1317
1317
1318 cctx = context.workingctx(self, text, user, date, extra, status)
1318 cctx = context.workingctx(self, text, user, date, extra, status)
1319
1319
1320 if (not force and not extra.get("close") and not merge
1320 if (not force and not extra.get("close") and not merge
1321 and not cctx.files()
1321 and not cctx.files()
1322 and wctx.branch() == wctx.p1().branch()):
1322 and wctx.branch() == wctx.p1().branch()):
1323 return None
1323 return None
1324
1324
1325 if merge and cctx.deleted():
1325 if merge and cctx.deleted():
1326 raise util.Abort(_("cannot commit merge with missing files"))
1326 raise util.Abort(_("cannot commit merge with missing files"))
1327
1327
1328 ms = mergemod.mergestate(self)
1328 ms = mergemod.mergestate(self)
1329 for f in status.modified:
1329 for f in status.modified:
1330 if f in ms and ms[f] == 'u':
1330 if f in ms and ms[f] == 'u':
1331 raise util.Abort(_("unresolved merge conflicts "
1331 raise util.Abort(_("unresolved merge conflicts "
1332 "(see hg help resolve)"))
1332 "(see hg help resolve)"))
1333
1333
1334 if editor:
1334 if editor:
1335 cctx._text = editor(self, cctx, subs)
1335 cctx._text = editor(self, cctx, subs)
1336 edited = (text != cctx._text)
1336 edited = (text != cctx._text)
1337
1337
1338 # Save commit message in case this transaction gets rolled back
1338 # Save commit message in case this transaction gets rolled back
1339 # (e.g. by a pretxncommit hook). Leave the content alone on
1339 # (e.g. by a pretxncommit hook). Leave the content alone on
1340 # the assumption that the user will use the same editor again.
1340 # the assumption that the user will use the same editor again.
1341 msgfn = self.savecommitmessage(cctx._text)
1341 msgfn = self.savecommitmessage(cctx._text)
1342
1342
1343 # commit subs and write new state
1343 # commit subs and write new state
1344 if subs:
1344 if subs:
1345 for s in sorted(commitsubs):
1345 for s in sorted(commitsubs):
1346 sub = wctx.sub(s)
1346 sub = wctx.sub(s)
1347 self.ui.status(_('committing subrepository %s\n') %
1347 self.ui.status(_('committing subrepository %s\n') %
1348 subrepo.subrelpath(sub))
1348 subrepo.subrelpath(sub))
1349 sr = sub.commit(cctx._text, user, date)
1349 sr = sub.commit(cctx._text, user, date)
1350 newstate[s] = (newstate[s][0], sr)
1350 newstate[s] = (newstate[s][0], sr)
1351 subrepo.writestate(self, newstate)
1351 subrepo.writestate(self, newstate)
1352
1352
1353 p1, p2 = self.dirstate.parents()
1353 p1, p2 = self.dirstate.parents()
1354 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1354 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1355 try:
1355 try:
1356 self.hook("precommit", throw=True, parent1=hookp1,
1356 self.hook("precommit", throw=True, parent1=hookp1,
1357 parent2=hookp2)
1357 parent2=hookp2)
1358 ret = self.commitctx(cctx, True)
1358 ret = self.commitctx(cctx, True)
1359 except: # re-raises
1359 except: # re-raises
1360 if edited:
1360 if edited:
1361 self.ui.write(
1361 self.ui.write(
1362 _('note: commit message saved in %s\n') % msgfn)
1362 _('note: commit message saved in %s\n') % msgfn)
1363 raise
1363 raise
1364
1364
1365 # update bookmarks, dirstate and mergestate
1365 # update bookmarks, dirstate and mergestate
1366 bookmarks.update(self, [p1, p2], ret)
1366 bookmarks.update(self, [p1, p2], ret)
1367 cctx.markcommitted(ret)
1367 cctx.markcommitted(ret)
1368 ms.reset()
1368 ms.reset()
1369 finally:
1369 finally:
1370 wlock.release()
1370 wlock.release()
1371
1371
1372 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1372 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1373 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1373 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1374 self._afterlock(commithook)
1374 self._afterlock(commithook)
1375 return ret
1375 return ret
1376
1376
    @unfilteredmethod
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.

        When 'error' is true, IOErrors while reading file contents are
        tolerated (the file is treated as removed) rather than aborting.
        Returns the new changeset node.
        """

        tr = None
        p1, p2 = ctx.p1(), ctx.p2()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction("commit")
            # a weak proxy so filelog/manifest code cannot keep tr alive
            trp = weakref.proxy(tr)

            if ctx.files():
                m1 = p1.manifest()
                m2 = p2.manifest()
                m = m1.copy()

                # check in files
                added = []
                changed = []
                removed = list(ctx.removed())
                linkrev = len(self)
                for f in sorted(ctx.modified() + ctx.added()):
                    self.ui.note(f + "\n")
                    try:
                        fctx = ctx[f]
                        if fctx is None:
                            # context reports the file as gone: drop it
                            removed.append(f)
                        else:
                            added.append(f)
                            m[f] = self._filecommit(fctx, m1, m2, linkrev,
                                                    trp, changed)
                            m.setflag(f, fctx.flags())
                    except OSError, inst:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    except IOError, inst:
                        errcode = getattr(inst, 'errno', errno.ENOENT)
                        if error or errcode and errcode != errno.ENOENT:
                            self.ui.warn(_("trouble committing %s!\n") % f)
                        raise

                # update manifest
                # only report files actually present in either parent
                removed = [f for f in sorted(removed) if f in m1 or f in m2]
                drop = [f for f in removed if f in m]
                for f in drop:
                    del m[f]
                mn = self.manifest.add(m, trp, linkrev,
                                       p1.manifestnode(), p2.manifestnode(),
                                       added, drop)
                files = changed + removed
            else:
                # no file changes: reuse the first parent's manifest
                mn = p1.manifestnode()
                files = []

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, files, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            # lets pretxncommit hooks see the pending changelog write
            p = lambda: self.changelog.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            # set the new commit is proper phase
            targetphase = subrepo.newcommitphase(self.ui, ctx)
            if targetphase:
                # retract boundary do not alter parent changeset.
                # if a parent have higher the resulting phase will
                # be compliant anyway
                #
                # if minimal phase was 0 we don't need to retract anything
                phases.retractboundary(self, tr, targetphase, [n])
            tr.close()
            branchmap.updatecache(self.filtered('served'))
            return n
        finally:
            if tr:
                tr.release()
            lock.release()
1461
1461
1462 @unfilteredmethod
1462 @unfilteredmethod
1463 def destroying(self):
1463 def destroying(self):
1464 '''Inform the repository that nodes are about to be destroyed.
1464 '''Inform the repository that nodes are about to be destroyed.
1465 Intended for use by strip and rollback, so there's a common
1465 Intended for use by strip and rollback, so there's a common
1466 place for anything that has to be done before destroying history.
1466 place for anything that has to be done before destroying history.
1467
1467
1468 This is mostly useful for saving state that is in memory and waiting
1468 This is mostly useful for saving state that is in memory and waiting
1469 to be flushed when the current lock is released. Because a call to
1469 to be flushed when the current lock is released. Because a call to
1470 destroyed is imminent, the repo will be invalidated causing those
1470 destroyed is imminent, the repo will be invalidated causing those
1471 changes to stay in memory (waiting for the next unlock), or vanish
1471 changes to stay in memory (waiting for the next unlock), or vanish
1472 completely.
1472 completely.
1473 '''
1473 '''
1474 # When using the same lock to commit and strip, the phasecache is left
1474 # When using the same lock to commit and strip, the phasecache is left
1475 # dirty after committing. Then when we strip, the repo is invalidated,
1475 # dirty after committing. Then when we strip, the repo is invalidated,
1476 # causing those changes to disappear.
1476 # causing those changes to disappear.
1477 if '_phasecache' in vars(self):
1477 if '_phasecache' in vars(self):
1478 self._phasecache.write()
1478 self._phasecache.write()
1479
1479
1480 @unfilteredmethod
1480 @unfilteredmethod
1481 def destroyed(self):
1481 def destroyed(self):
1482 '''Inform the repository that nodes have been destroyed.
1482 '''Inform the repository that nodes have been destroyed.
1483 Intended for use by strip and rollback, so there's a common
1483 Intended for use by strip and rollback, so there's a common
1484 place for anything that has to be done after destroying history.
1484 place for anything that has to be done after destroying history.
1485 '''
1485 '''
1486 # When one tries to:
1486 # When one tries to:
1487 # 1) destroy nodes thus calling this method (e.g. strip)
1487 # 1) destroy nodes thus calling this method (e.g. strip)
1488 # 2) use phasecache somewhere (e.g. commit)
1488 # 2) use phasecache somewhere (e.g. commit)
1489 #
1489 #
1490 # then 2) will fail because the phasecache contains nodes that were
1490 # then 2) will fail because the phasecache contains nodes that were
1491 # removed. We can either remove phasecache from the filecache,
1491 # removed. We can either remove phasecache from the filecache,
1492 # causing it to reload next time it is accessed, or simply filter
1492 # causing it to reload next time it is accessed, or simply filter
1493 # the removed nodes now and write the updated cache.
1493 # the removed nodes now and write the updated cache.
1494 self._phasecache.filterunknown(self)
1494 self._phasecache.filterunknown(self)
1495 self._phasecache.write()
1495 self._phasecache.write()
1496
1496
1497 # update the 'served' branch cache to help read only server process
1497 # update the 'served' branch cache to help read only server process
1498 # Thanks to branchcache collaboration this is done from the nearest
1498 # Thanks to branchcache collaboration this is done from the nearest
1499 # filtered subset and it is expected to be fast.
1499 # filtered subset and it is expected to be fast.
1500 branchmap.updatecache(self.filtered('served'))
1500 branchmap.updatecache(self.filtered('served'))
1501
1501
1502 # Ensure the persistent tag cache is updated. Doing it now
1502 # Ensure the persistent tag cache is updated. Doing it now
1503 # means that the tag cache only has to worry about destroyed
1503 # means that the tag cache only has to worry about destroyed
1504 # heads immediately after a strip/rollback. That in turn
1504 # heads immediately after a strip/rollback. That in turn
1505 # guarantees that "cachetip == currenttip" (comparing both rev
1505 # guarantees that "cachetip == currenttip" (comparing both rev
1506 # and node) always means no nodes have been added or destroyed.
1506 # and node) always means no nodes have been added or destroyed.
1507
1507
1508 # XXX this is suboptimal when qrefresh'ing: we strip the current
1508 # XXX this is suboptimal when qrefresh'ing: we strip the current
1509 # head, refresh the tag cache, then immediately add a new head.
1509 # head, refresh the tag cache, then immediately add a new head.
1510 # But I think doing it this way is necessary for the "instant
1510 # But I think doing it this way is necessary for the "instant
1511 # tag cache retrieval" case to work.
1511 # tag cache retrieval" case to work.
1512 self.invalidate()
1512 self.invalidate()
1513
1513
1514 def walk(self, match, node=None):
1514 def walk(self, match, node=None):
1515 '''
1515 '''
1516 walk recursively through the directory tree or a given
1516 walk recursively through the directory tree or a given
1517 changeset, finding all files matched by the match
1517 changeset, finding all files matched by the match
1518 function
1518 function
1519 '''
1519 '''
1520 return self[node].walk(match)
1520 return self[node].walk(match)
1521
1521
1522 def status(self, node1='.', node2=None, match=None,
1522 def status(self, node1='.', node2=None, match=None,
1523 ignored=False, clean=False, unknown=False,
1523 ignored=False, clean=False, unknown=False,
1524 listsubrepos=False):
1524 listsubrepos=False):
1525 '''a convenience method that calls node1.status(node2)'''
1525 '''a convenience method that calls node1.status(node2)'''
1526 return self[node1].status(node2, match, ignored, clean, unknown,
1526 return self[node1].status(node2, match, ignored, clean, unknown,
1527 listsubrepos)
1527 listsubrepos)
1528
1528
1529 def heads(self, start=None):
1529 def heads(self, start=None):
1530 heads = self.changelog.heads(start)
1530 heads = self.changelog.heads(start)
1531 # sort the output in rev descending order
1531 # sort the output in rev descending order
1532 return sorted(heads, key=self.changelog.rev, reverse=True)
1532 return sorted(heads, key=self.changelog.rev, reverse=True)
1533
1533
1534 def branchheads(self, branch=None, start=None, closed=False):
1534 def branchheads(self, branch=None, start=None, closed=False):
1535 '''return a (possibly filtered) list of heads for the given branch
1535 '''return a (possibly filtered) list of heads for the given branch
1536
1536
1537 Heads are returned in topological order, from newest to oldest.
1537 Heads are returned in topological order, from newest to oldest.
1538 If branch is None, use the dirstate branch.
1538 If branch is None, use the dirstate branch.
1539 If start is not None, return only heads reachable from start.
1539 If start is not None, return only heads reachable from start.
1540 If closed is True, return heads that are marked as closed as well.
1540 If closed is True, return heads that are marked as closed as well.
1541 '''
1541 '''
1542 if branch is None:
1542 if branch is None:
1543 branch = self[None].branch()
1543 branch = self[None].branch()
1544 branches = self.branchmap()
1544 branches = self.branchmap()
1545 if branch not in branches:
1545 if branch not in branches:
1546 return []
1546 return []
1547 # the cache returns heads ordered lowest to highest
1547 # the cache returns heads ordered lowest to highest
1548 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1548 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1549 if start is not None:
1549 if start is not None:
1550 # filter out the heads that cannot be reached from startrev
1550 # filter out the heads that cannot be reached from startrev
1551 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1551 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1552 bheads = [h for h in bheads if h in fbheads]
1552 bheads = [h for h in bheads if h in fbheads]
1553 return bheads
1553 return bheads
1554
1554
1555 def branches(self, nodes):
1555 def branches(self, nodes):
1556 if not nodes:
1556 if not nodes:
1557 nodes = [self.changelog.tip()]
1557 nodes = [self.changelog.tip()]
1558 b = []
1558 b = []
1559 for n in nodes:
1559 for n in nodes:
1560 t = n
1560 t = n
1561 while True:
1561 while True:
1562 p = self.changelog.parents(n)
1562 p = self.changelog.parents(n)
1563 if p[1] != nullid or p[0] == nullid:
1563 if p[1] != nullid or p[0] == nullid:
1564 b.append((t, n, p[0], p[1]))
1564 b.append((t, n, p[0], p[1]))
1565 break
1565 break
1566 n = p[0]
1566 n = p[0]
1567 return b
1567 return b
1568
1568
1569 def between(self, pairs):
1569 def between(self, pairs):
1570 r = []
1570 r = []
1571
1571
1572 for top, bottom in pairs:
1572 for top, bottom in pairs:
1573 n, l, i = top, [], 0
1573 n, l, i = top, [], 0
1574 f = 1
1574 f = 1
1575
1575
1576 while n != bottom and n != nullid:
1576 while n != bottom and n != nullid:
1577 p = self.changelog.parents(n)[0]
1577 p = self.changelog.parents(n)[0]
1578 if i == f:
1578 if i == f:
1579 l.append(n)
1579 l.append(n)
1580 f = f * 2
1580 f = f * 2
1581 n = p
1581 n = p
1582 i += 1
1582 i += 1
1583
1583
1584 r.append(l)
1584 r.append(l)
1585
1585
1586 return r
1586 return r
1587
1587
1588 def checkpush(self, pushop):
1588 def checkpush(self, pushop):
1589 """Extensions can override this function if additional checks have
1589 """Extensions can override this function if additional checks have
1590 to be performed before pushing, or call it if they override push
1590 to be performed before pushing, or call it if they override push
1591 command.
1591 command.
1592 """
1592 """
1593 pass
1593 pass
1594
1594
1595 @unfilteredpropertycache
1595 @unfilteredpropertycache
1596 def prepushoutgoinghooks(self):
1596 def prepushoutgoinghooks(self):
1597 """Return util.hooks consists of "(repo, remote, outgoing)"
1597 """Return util.hooks consists of "(repo, remote, outgoing)"
1598 functions, which are called before pushing changesets.
1598 functions, which are called before pushing changesets.
1599 """
1599 """
1600 return util.hooks()
1600 return util.hooks()
1601
1601
1602 def stream_in(self, remote, requirements):
1602 def stream_in(self, remote, requirements):
1603 lock = self.lock()
1603 lock = self.lock()
1604 try:
1604 try:
1605 # Save remote branchmap. We will use it later
1605 # Save remote branchmap. We will use it later
1606 # to speed up branchcache creation
1606 # to speed up branchcache creation
1607 rbranchmap = None
1607 rbranchmap = None
1608 if remote.capable("branchmap"):
1608 if remote.capable("branchmap"):
1609 rbranchmap = remote.branchmap()
1609 rbranchmap = remote.branchmap()
1610
1610
1611 fp = remote.stream_out()
1611 fp = remote.stream_out()
1612 l = fp.readline()
1612 l = fp.readline()
1613 try:
1613 try:
1614 resp = int(l)
1614 resp = int(l)
1615 except ValueError:
1615 except ValueError:
1616 raise error.ResponseError(
1616 raise error.ResponseError(
1617 _('unexpected response from remote server:'), l)
1617 _('unexpected response from remote server:'), l)
1618 if resp == 1:
1618 if resp == 1:
1619 raise util.Abort(_('operation forbidden by server'))
1619 raise util.Abort(_('operation forbidden by server'))
1620 elif resp == 2:
1620 elif resp == 2:
1621 raise util.Abort(_('locking the remote repository failed'))
1621 raise util.Abort(_('locking the remote repository failed'))
1622 elif resp != 0:
1622 elif resp != 0:
1623 raise util.Abort(_('the server sent an unknown error code'))
1623 raise util.Abort(_('the server sent an unknown error code'))
1624 self.ui.status(_('streaming all changes\n'))
1624 self.ui.status(_('streaming all changes\n'))
1625 l = fp.readline()
1625 l = fp.readline()
1626 try:
1626 try:
1627 total_files, total_bytes = map(int, l.split(' ', 1))
1627 total_files, total_bytes = map(int, l.split(' ', 1))
1628 except (ValueError, TypeError):
1628 except (ValueError, TypeError):
1629 raise error.ResponseError(
1629 raise error.ResponseError(
1630 _('unexpected response from remote server:'), l)
1630 _('unexpected response from remote server:'), l)
1631 self.ui.status(_('%d files to transfer, %s of data\n') %
1631 self.ui.status(_('%d files to transfer, %s of data\n') %
1632 (total_files, util.bytecount(total_bytes)))
1632 (total_files, util.bytecount(total_bytes)))
1633 handled_bytes = 0
1633 handled_bytes = 0
1634 self.ui.progress(_('clone'), 0, total=total_bytes)
1634 self.ui.progress(_('clone'), 0, total=total_bytes)
1635 start = time.time()
1635 start = time.time()
1636
1636
1637 tr = self.transaction(_('clone'))
1637 tr = self.transaction(_('clone'))
1638 try:
1638 try:
1639 for i in xrange(total_files):
1639 for i in xrange(total_files):
1640 # XXX doesn't support '\n' or '\r' in filenames
1640 # XXX doesn't support '\n' or '\r' in filenames
1641 l = fp.readline()
1641 l = fp.readline()
1642 try:
1642 try:
1643 name, size = l.split('\0', 1)
1643 name, size = l.split('\0', 1)
1644 size = int(size)
1644 size = int(size)
1645 except (ValueError, TypeError):
1645 except (ValueError, TypeError):
1646 raise error.ResponseError(
1646 raise error.ResponseError(
1647 _('unexpected response from remote server:'), l)
1647 _('unexpected response from remote server:'), l)
1648 if self.ui.debugflag:
1648 if self.ui.debugflag:
1649 self.ui.debug('adding %s (%s)\n' %
1649 self.ui.debug('adding %s (%s)\n' %
1650 (name, util.bytecount(size)))
1650 (name, util.bytecount(size)))
1651 # for backwards compat, name was partially encoded
1651 # for backwards compat, name was partially encoded
1652 ofp = self.sopener(store.decodedir(name), 'w')
1652 ofp = self.sopener(store.decodedir(name), 'w')
1653 for chunk in util.filechunkiter(fp, limit=size):
1653 for chunk in util.filechunkiter(fp, limit=size):
1654 handled_bytes += len(chunk)
1654 handled_bytes += len(chunk)
1655 self.ui.progress(_('clone'), handled_bytes,
1655 self.ui.progress(_('clone'), handled_bytes,
1656 total=total_bytes)
1656 total=total_bytes)
1657 ofp.write(chunk)
1657 ofp.write(chunk)
1658 ofp.close()
1658 ofp.close()
1659 tr.close()
1659 tr.close()
1660 finally:
1660 finally:
1661 tr.release()
1661 tr.release()
1662
1662
1663 # Writing straight to files circumvented the inmemory caches
1663 # Writing straight to files circumvented the inmemory caches
1664 self.invalidate()
1664 self.invalidate()
1665
1665
1666 elapsed = time.time() - start
1666 elapsed = time.time() - start
1667 if elapsed <= 0:
1667 if elapsed <= 0:
1668 elapsed = 0.001
1668 elapsed = 0.001
1669 self.ui.progress(_('clone'), None)
1669 self.ui.progress(_('clone'), None)
1670 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1670 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1671 (util.bytecount(total_bytes), elapsed,
1671 (util.bytecount(total_bytes), elapsed,
1672 util.bytecount(total_bytes / elapsed)))
1672 util.bytecount(total_bytes / elapsed)))
1673
1673
1674 # new requirements = old non-format requirements +
1674 # new requirements = old non-format requirements +
1675 # new format-related
1675 # new format-related
1676 # requirements from the streamed-in repository
1676 # requirements from the streamed-in repository
1677 requirements.update(set(self.requirements) - self.supportedformats)
1677 requirements.update(set(self.requirements) - self.supportedformats)
1678 self._applyrequirements(requirements)
1678 self._applyrequirements(requirements)
1679 self._writerequirements()
1679 self._writerequirements()
1680
1680
1681 if rbranchmap:
1681 if rbranchmap:
1682 rbheads = []
1682 rbheads = []
1683 closed = []
1683 for bheads in rbranchmap.itervalues():
1684 for bheads in rbranchmap.itervalues():
1684 rbheads.extend(bheads)
1685 rbheads.extend(bheads)
1686 for h in bheads:
1687 r = self.changelog.rev(h)
1688 b, c = self.changelog.branchinfo(r)
1689 if c:
1690 closed.append(h)
1685
1691
1686 if rbheads:
1692 if rbheads:
1687 rtiprev = max((int(self.changelog.rev(node))
1693 rtiprev = max((int(self.changelog.rev(node))
1688 for node in rbheads))
1694 for node in rbheads))
1689 cache = branchmap.branchcache(rbranchmap,
1695 cache = branchmap.branchcache(rbranchmap,
1690 self[rtiprev].node(),
1696 self[rtiprev].node(),
1691 rtiprev)
1697 rtiprev,
1698 closednodes=closed)
1692 # Try to stick it as low as possible
1699 # Try to stick it as low as possible
1693 # filter above served are unlikely to be fetch from a clone
1700 # filter above served are unlikely to be fetch from a clone
1694 for candidate in ('base', 'immutable', 'served'):
1701 for candidate in ('base', 'immutable', 'served'):
1695 rview = self.filtered(candidate)
1702 rview = self.filtered(candidate)
1696 if cache.validfor(rview):
1703 if cache.validfor(rview):
1697 self._branchcaches[candidate] = cache
1704 self._branchcaches[candidate] = cache
1698 cache.write(rview)
1705 cache.write(rview)
1699 break
1706 break
1700 self.invalidate()
1707 self.invalidate()
1701 return len(self.heads()) + 1
1708 return len(self.heads()) + 1
1702 finally:
1709 finally:
1703 lock.release()
1710 lock.release()
1704
1711
1705 def clone(self, remote, heads=[], stream=False):
1712 def clone(self, remote, heads=[], stream=False):
1706 '''clone remote repository.
1713 '''clone remote repository.
1707
1714
1708 keyword arguments:
1715 keyword arguments:
1709 heads: list of revs to clone (forces use of pull)
1716 heads: list of revs to clone (forces use of pull)
1710 stream: use streaming clone if possible'''
1717 stream: use streaming clone if possible'''
1711
1718
1712 # now, all clients that can request uncompressed clones can
1719 # now, all clients that can request uncompressed clones can
1713 # read repo formats supported by all servers that can serve
1720 # read repo formats supported by all servers that can serve
1714 # them.
1721 # them.
1715
1722
1716 # if revlog format changes, client will have to check version
1723 # if revlog format changes, client will have to check version
1717 # and format flags on "stream" capability, and use
1724 # and format flags on "stream" capability, and use
1718 # uncompressed only if compatible.
1725 # uncompressed only if compatible.
1719
1726
1720 if not stream:
1727 if not stream:
1721 # if the server explicitly prefers to stream (for fast LANs)
1728 # if the server explicitly prefers to stream (for fast LANs)
1722 stream = remote.capable('stream-preferred')
1729 stream = remote.capable('stream-preferred')
1723
1730
1724 if stream and not heads:
1731 if stream and not heads:
1725 # 'stream' means remote revlog format is revlogv1 only
1732 # 'stream' means remote revlog format is revlogv1 only
1726 if remote.capable('stream'):
1733 if remote.capable('stream'):
1727 self.stream_in(remote, set(('revlogv1',)))
1734 self.stream_in(remote, set(('revlogv1',)))
1728 else:
1735 else:
1729 # otherwise, 'streamreqs' contains the remote revlog format
1736 # otherwise, 'streamreqs' contains the remote revlog format
1730 streamreqs = remote.capable('streamreqs')
1737 streamreqs = remote.capable('streamreqs')
1731 if streamreqs:
1738 if streamreqs:
1732 streamreqs = set(streamreqs.split(','))
1739 streamreqs = set(streamreqs.split(','))
1733 # if we support it, stream in and adjust our requirements
1740 # if we support it, stream in and adjust our requirements
1734 if not streamreqs - self.supportedformats:
1741 if not streamreqs - self.supportedformats:
1735 self.stream_in(remote, streamreqs)
1742 self.stream_in(remote, streamreqs)
1736
1743
1737 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1744 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1738 try:
1745 try:
1739 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1746 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1740 ret = exchange.pull(self, remote, heads).cgresult
1747 ret = exchange.pull(self, remote, heads).cgresult
1741 finally:
1748 finally:
1742 self.ui.restoreconfig(quiet)
1749 self.ui.restoreconfig(quiet)
1743 return ret
1750 return ret
1744
1751
1745 def pushkey(self, namespace, key, old, new):
1752 def pushkey(self, namespace, key, old, new):
1746 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1753 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1747 old=old, new=new)
1754 old=old, new=new)
1748 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1755 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1749 ret = pushkey.push(self, namespace, key, old, new)
1756 ret = pushkey.push(self, namespace, key, old, new)
1750 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1757 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1751 ret=ret)
1758 ret=ret)
1752 return ret
1759 return ret
1753
1760
1754 def listkeys(self, namespace):
1761 def listkeys(self, namespace):
1755 self.hook('prelistkeys', throw=True, namespace=namespace)
1762 self.hook('prelistkeys', throw=True, namespace=namespace)
1756 self.ui.debug('listing keys for "%s"\n' % namespace)
1763 self.ui.debug('listing keys for "%s"\n' % namespace)
1757 values = pushkey.list(self, namespace)
1764 values = pushkey.list(self, namespace)
1758 self.hook('listkeys', namespace=namespace, values=values)
1765 self.hook('listkeys', namespace=namespace, values=values)
1759 return values
1766 return values
1760
1767
1761 def debugwireargs(self, one, two, three=None, four=None, five=None):
1768 def debugwireargs(self, one, two, three=None, four=None, five=None):
1762 '''used to test argument passing over the wire'''
1769 '''used to test argument passing over the wire'''
1763 return "%s %s %s %s %s" % (one, two, three, four, five)
1770 return "%s %s %s %s %s" % (one, two, three, four, five)
1764
1771
1765 def savecommitmessage(self, text):
1772 def savecommitmessage(self, text):
1766 fp = self.opener('last-message.txt', 'wb')
1773 fp = self.opener('last-message.txt', 'wb')
1767 try:
1774 try:
1768 fp.write(text)
1775 fp.write(text)
1769 finally:
1776 finally:
1770 fp.close()
1777 fp.close()
1771 return self.pathto(fp.name[len(self.root) + 1:])
1778 return self.pathto(fp.name[len(self.root) + 1:])
1772
1779
1773 # used to avoid circular references so destructors work
1780 # used to avoid circular references so destructors work
1774 def aftertrans(files):
1781 def aftertrans(files):
1775 renamefiles = [tuple(t) for t in files]
1782 renamefiles = [tuple(t) for t in files]
1776 def a():
1783 def a():
1777 for vfs, src, dest in renamefiles:
1784 for vfs, src, dest in renamefiles:
1778 try:
1785 try:
1779 vfs.rename(src, dest)
1786 vfs.rename(src, dest)
1780 except OSError: # journal file does not yet exist
1787 except OSError: # journal file does not yet exist
1781 pass
1788 pass
1782 return a
1789 return a
1783
1790
1784 def undoname(fn):
1791 def undoname(fn):
1785 base, name = os.path.split(fn)
1792 base, name = os.path.split(fn)
1786 assert name.startswith('journal')
1793 assert name.startswith('journal')
1787 return os.path.join(base, name.replace('journal', 'undo', 1))
1794 return os.path.join(base, name.replace('journal', 'undo', 1))
1788
1795
1789 def instance(ui, path, create):
1796 def instance(ui, path, create):
1790 return localrepository(ui, util.urllocalpath(path), create)
1797 return localrepository(ui, util.urllocalpath(path), create)
1791
1798
1792 def islocal(path):
1799 def islocal(path):
1793 return True
1800 return True
@@ -1,451 +1,471 b''
1
1
2
2
3 This test tries to exercise the ssh functionality with a dummy script
3 This test tries to exercise the ssh functionality with a dummy script
4
4
5 creating 'remote' repo
5 creating 'remote' repo
6
6
7 $ hg init remote
7 $ hg init remote
8 $ cd remote
8 $ cd remote
9 $ echo this > foo
9 $ echo this > foo
10 $ echo this > fooO
10 $ echo this > fooO
11 $ hg ci -A -m "init" foo fooO
11 $ hg ci -A -m "init" foo fooO
12
13 insert a closed branch (issue4428)
14
15 $ hg up null
16 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
17 $ hg branch closed
18 marked working directory as branch closed
19 (branches are permanent and global, did you want a bookmark?)
20 $ hg ci -mc0
21 $ hg ci --close-branch -mc1
22 $ hg up -q default
23
24 configure for serving
25
12 $ cat <<EOF > .hg/hgrc
26 $ cat <<EOF > .hg/hgrc
13 > [server]
27 > [server]
14 > uncompressed = True
28 > uncompressed = True
15 >
29 >
16 > [hooks]
30 > [hooks]
17 > changegroup = python "$TESTDIR/printenv.py" changegroup-in-remote 0 ../dummylog
31 > changegroup = python "$TESTDIR/printenv.py" changegroup-in-remote 0 ../dummylog
18 > EOF
32 > EOF
19 $ cd ..
33 $ cd ..
20
34
21 repo not found error
35 repo not found error
22
36
23 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
37 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
24 remote: abort: there is no Mercurial repository here (.hg not found)!
38 remote: abort: there is no Mercurial repository here (.hg not found)!
25 abort: no suitable response from remote hg!
39 abort: no suitable response from remote hg!
26 [255]
40 [255]
27
41
28 non-existent absolute path
42 non-existent absolute path
29
43
30 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
44 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
31 remote: abort: there is no Mercurial repository here (.hg not found)!
45 remote: abort: there is no Mercurial repository here (.hg not found)!
32 abort: no suitable response from remote hg!
46 abort: no suitable response from remote hg!
33 [255]
47 [255]
34
48
35 clone remote via stream
49 clone remote via stream
36
50
37 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
51 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
38 streaming all changes
52 streaming all changes
39 4 files to transfer, 392 bytes of data
53 4 files to transfer, 615 bytes of data
40 transferred 392 bytes in * seconds (*/sec) (glob)
54 transferred 615 bytes in * seconds (*) (glob)
41 searching for changes
55 searching for changes
42 no changes found
56 no changes found
43 updating to branch default
57 updating to branch default
44 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
58 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
45 $ cd local-stream
59 $ cd local-stream
46 $ hg verify
60 $ hg verify
47 checking changesets
61 checking changesets
48 checking manifests
62 checking manifests
49 crosschecking files in changesets and manifests
63 crosschecking files in changesets and manifests
50 checking files
64 checking files
51 2 files, 1 changesets, 2 total revisions
65 2 files, 3 changesets, 2 total revisions
66 $ hg branches
67 default 0:1160648e36ce
52 $ cd ..
68 $ cd ..
53
69
54 clone bookmarks via stream
70 clone bookmarks via stream
55
71
56 $ hg -R local-stream book mybook
72 $ hg -R local-stream book mybook
57 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
73 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
58 streaming all changes
74 streaming all changes
59 4 files to transfer, 392 bytes of data
75 4 files to transfer, 615 bytes of data
60 transferred 392 bytes in * seconds (* KB/sec) (glob)
76 transferred 615 bytes in * seconds (*) (glob)
61 searching for changes
77 searching for changes
62 no changes found
78 no changes found
63 updating to branch default
79 updating to branch default
64 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
80 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
65 $ cd stream2
81 $ cd stream2
66 $ hg book
82 $ hg book
67 mybook 0:1160648e36ce
83 mybook 0:1160648e36ce
68 $ cd ..
84 $ cd ..
69 $ rm -rf local-stream stream2
85 $ rm -rf local-stream stream2
70
86
71 clone remote via pull
87 clone remote via pull
72
88
73 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
89 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
74 requesting all changes
90 requesting all changes
75 adding changesets
91 adding changesets
76 adding manifests
92 adding manifests
77 adding file changes
93 adding file changes
78 added 1 changesets with 2 changes to 2 files
94 added 3 changesets with 2 changes to 2 files
79 updating to branch default
95 updating to branch default
80 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
96 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
81
97
82 verify
98 verify
83
99
84 $ cd local
100 $ cd local
85 $ hg verify
101 $ hg verify
86 checking changesets
102 checking changesets
87 checking manifests
103 checking manifests
88 crosschecking files in changesets and manifests
104 crosschecking files in changesets and manifests
89 checking files
105 checking files
90 2 files, 1 changesets, 2 total revisions
106 2 files, 3 changesets, 2 total revisions
91 $ echo '[hooks]' >> .hg/hgrc
107 $ echo '[hooks]' >> .hg/hgrc
92 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup-in-local 0 ../dummylog" >> .hg/hgrc
108 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup-in-local 0 ../dummylog" >> .hg/hgrc
93
109
94 empty default pull
110 empty default pull
95
111
96 $ hg paths
112 $ hg paths
97 default = ssh://user@dummy/remote
113 default = ssh://user@dummy/remote
98 $ hg pull -e "python \"$TESTDIR/dummyssh\""
114 $ hg pull -e "python \"$TESTDIR/dummyssh\""
99 pulling from ssh://user@dummy/remote
115 pulling from ssh://user@dummy/remote
100 searching for changes
116 searching for changes
101 no changes found
117 no changes found
102
118
103 local change
119 local change
104
120
105 $ echo bleah > foo
121 $ echo bleah > foo
106 $ hg ci -m "add"
122 $ hg ci -m "add"
107
123
108 updating rc
124 updating rc
109
125
110 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
126 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
111 $ echo "[ui]" >> .hg/hgrc
127 $ echo "[ui]" >> .hg/hgrc
112 $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
128 $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
113
129
114 find outgoing
130 find outgoing
115
131
116 $ hg out ssh://user@dummy/remote
132 $ hg out ssh://user@dummy/remote
117 comparing with ssh://user@dummy/remote
133 comparing with ssh://user@dummy/remote
118 searching for changes
134 searching for changes
119 changeset: 1:a28a9d1a809c
135 changeset: 3:a28a9d1a809c
120 tag: tip
136 tag: tip
137 parent: 0:1160648e36ce
121 user: test
138 user: test
122 date: Thu Jan 01 00:00:00 1970 +0000
139 date: Thu Jan 01 00:00:00 1970 +0000
123 summary: add
140 summary: add
124
141
125
142
126 find incoming on the remote side
143 find incoming on the remote side
127
144
128 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
145 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
129 comparing with ssh://user@dummy/local
146 comparing with ssh://user@dummy/local
130 searching for changes
147 searching for changes
131 changeset: 1:a28a9d1a809c
148 changeset: 3:a28a9d1a809c
132 tag: tip
149 tag: tip
150 parent: 0:1160648e36ce
133 user: test
151 user: test
134 date: Thu Jan 01 00:00:00 1970 +0000
152 date: Thu Jan 01 00:00:00 1970 +0000
135 summary: add
153 summary: add
136
154
137
155
138 find incoming on the remote side (using absolute path)
156 find incoming on the remote side (using absolute path)
139
157
140 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
158 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
141 comparing with ssh://user@dummy/$TESTTMP/local
159 comparing with ssh://user@dummy/$TESTTMP/local
142 searching for changes
160 searching for changes
143 changeset: 1:a28a9d1a809c
161 changeset: 3:a28a9d1a809c
144 tag: tip
162 tag: tip
163 parent: 0:1160648e36ce
145 user: test
164 user: test
146 date: Thu Jan 01 00:00:00 1970 +0000
165 date: Thu Jan 01 00:00:00 1970 +0000
147 summary: add
166 summary: add
148
167
149
168
150 push
169 push
151
170
152 $ hg push
171 $ hg push
153 pushing to ssh://user@dummy/remote
172 pushing to ssh://user@dummy/remote
154 searching for changes
173 searching for changes
155 remote: adding changesets
174 remote: adding changesets
156 remote: adding manifests
175 remote: adding manifests
157 remote: adding file changes
176 remote: adding file changes
158 remote: added 1 changesets with 1 changes to 1 files
177 remote: added 1 changesets with 1 changes to 1 files
159 $ cd ../remote
178 $ cd ../remote
160
179
161 check remote tip
180 check remote tip
162
181
163 $ hg tip
182 $ hg tip
164 changeset: 1:a28a9d1a809c
183 changeset: 3:a28a9d1a809c
165 tag: tip
184 tag: tip
185 parent: 0:1160648e36ce
166 user: test
186 user: test
167 date: Thu Jan 01 00:00:00 1970 +0000
187 date: Thu Jan 01 00:00:00 1970 +0000
168 summary: add
188 summary: add
169
189
170 $ hg verify
190 $ hg verify
171 checking changesets
191 checking changesets
172 checking manifests
192 checking manifests
173 crosschecking files in changesets and manifests
193 crosschecking files in changesets and manifests
174 checking files
194 checking files
175 2 files, 2 changesets, 3 total revisions
195 2 files, 4 changesets, 3 total revisions
176 $ hg cat -r tip foo
196 $ hg cat -r tip foo
177 bleah
197 bleah
178 $ echo z > z
198 $ echo z > z
179 $ hg ci -A -m z z
199 $ hg ci -A -m z z
180 created new head
200 created new head
181
201
182 test pushkeys and bookmarks
202 test pushkeys and bookmarks
183
203
184 $ cd ../local
204 $ cd ../local
185 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
205 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
186 bookmarks
206 bookmarks
187 namespaces
207 namespaces
188 phases
208 phases
189 $ hg book foo -r 0
209 $ hg book foo -r 0
190 $ hg out -B
210 $ hg out -B
191 comparing with ssh://user@dummy/remote
211 comparing with ssh://user@dummy/remote
192 searching for changed bookmarks
212 searching for changed bookmarks
193 foo 1160648e36ce
213 foo 1160648e36ce
194 $ hg push -B foo
214 $ hg push -B foo
195 pushing to ssh://user@dummy/remote
215 pushing to ssh://user@dummy/remote
196 searching for changes
216 searching for changes
197 no changes found
217 no changes found
198 exporting bookmark foo
218 exporting bookmark foo
199 [1]
219 [1]
200 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
220 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
201 foo 1160648e36cec0054048a7edc4110c6f84fde594
221 foo 1160648e36cec0054048a7edc4110c6f84fde594
202 $ hg book -f foo
222 $ hg book -f foo
203 $ hg push --traceback
223 $ hg push --traceback
204 pushing to ssh://user@dummy/remote
224 pushing to ssh://user@dummy/remote
205 searching for changes
225 searching for changes
206 no changes found
226 no changes found
207 updating bookmark foo
227 updating bookmark foo
208 [1]
228 [1]
209 $ hg book -d foo
229 $ hg book -d foo
210 $ hg in -B
230 $ hg in -B
211 comparing with ssh://user@dummy/remote
231 comparing with ssh://user@dummy/remote
212 searching for changed bookmarks
232 searching for changed bookmarks
213 foo a28a9d1a809c
233 foo a28a9d1a809c
214 $ hg book -f -r 0 foo
234 $ hg book -f -r 0 foo
215 $ hg pull -B foo
235 $ hg pull -B foo
216 pulling from ssh://user@dummy/remote
236 pulling from ssh://user@dummy/remote
217 no changes found
237 no changes found
218 updating bookmark foo
238 updating bookmark foo
219 $ hg book -d foo
239 $ hg book -d foo
220 $ hg push -B foo
240 $ hg push -B foo
221 pushing to ssh://user@dummy/remote
241 pushing to ssh://user@dummy/remote
222 searching for changes
242 searching for changes
223 no changes found
243 no changes found
224 deleting remote bookmark foo
244 deleting remote bookmark foo
225 [1]
245 [1]
226
246
227 a bad, evil hook that prints to stdout
247 a bad, evil hook that prints to stdout
228
248
229 $ cat <<EOF > $TESTTMP/badhook
249 $ cat <<EOF > $TESTTMP/badhook
230 > import sys
250 > import sys
231 > sys.stdout.write("KABOOM\n")
251 > sys.stdout.write("KABOOM\n")
232 > EOF
252 > EOF
233
253
234 $ echo '[hooks]' >> ../remote/.hg/hgrc
254 $ echo '[hooks]' >> ../remote/.hg/hgrc
235 $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
255 $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
236 $ echo r > r
256 $ echo r > r
237 $ hg ci -A -m z r
257 $ hg ci -A -m z r
238
258
239 push should succeed even though it has an unexpected response
259 push should succeed even though it has an unexpected response
240
260
241 $ hg push
261 $ hg push
242 pushing to ssh://user@dummy/remote
262 pushing to ssh://user@dummy/remote
243 searching for changes
263 searching for changes
244 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
264 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
245 remote: adding changesets
265 remote: adding changesets
246 remote: adding manifests
266 remote: adding manifests
247 remote: adding file changes
267 remote: adding file changes
248 remote: added 1 changesets with 1 changes to 1 files
268 remote: added 1 changesets with 1 changes to 1 files
249 remote: KABOOM
269 remote: KABOOM
250 $ hg -R ../remote heads
270 $ hg -R ../remote heads
251 changeset: 3:1383141674ec
271 changeset: 5:1383141674ec
252 tag: tip
272 tag: tip
253 parent: 1:a28a9d1a809c
273 parent: 3:a28a9d1a809c
254 user: test
274 user: test
255 date: Thu Jan 01 00:00:00 1970 +0000
275 date: Thu Jan 01 00:00:00 1970 +0000
256 summary: z
276 summary: z
257
277
258 changeset: 2:6c0482d977a3
278 changeset: 4:6c0482d977a3
259 parent: 0:1160648e36ce
279 parent: 0:1160648e36ce
260 user: test
280 user: test
261 date: Thu Jan 01 00:00:00 1970 +0000
281 date: Thu Jan 01 00:00:00 1970 +0000
262 summary: z
282 summary: z
263
283
264
284
265 clone bookmarks
285 clone bookmarks
266
286
267 $ hg -R ../remote bookmark test
287 $ hg -R ../remote bookmark test
268 $ hg -R ../remote bookmarks
288 $ hg -R ../remote bookmarks
269 * test 2:6c0482d977a3
289 * test 4:6c0482d977a3
270 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
290 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
271 requesting all changes
291 requesting all changes
272 adding changesets
292 adding changesets
273 adding manifests
293 adding manifests
274 adding file changes
294 adding file changes
275 added 4 changesets with 5 changes to 4 files (+1 heads)
295 added 6 changesets with 5 changes to 4 files (+1 heads)
276 updating to branch default
296 updating to branch default
277 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
297 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
278 $ hg -R local-bookmarks bookmarks
298 $ hg -R local-bookmarks bookmarks
279 test 2:6c0482d977a3
299 test 4:6c0482d977a3
280
300
281 passwords in ssh urls are not supported
301 passwords in ssh urls are not supported
282 (we use a glob here because different Python versions give different
302 (we use a glob here because different Python versions give different
283 results here)
303 results here)
284
304
285 $ hg push ssh://user:erroneouspwd@dummy/remote
305 $ hg push ssh://user:erroneouspwd@dummy/remote
286 pushing to ssh://user:*@dummy/remote (glob)
306 pushing to ssh://user:*@dummy/remote (glob)
287 abort: password in URL not supported!
307 abort: password in URL not supported!
288 [255]
308 [255]
289
309
290 $ cd ..
310 $ cd ..
291
311
292 hide outer repo
312 hide outer repo
293 $ hg init
313 $ hg init
294
314
295 Test remote paths with spaces (issue2983):
315 Test remote paths with spaces (issue2983):
296
316
297 $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
317 $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
298 $ touch "$TESTTMP/a repo/test"
318 $ touch "$TESTTMP/a repo/test"
299 $ hg -R 'a repo' commit -A -m "test"
319 $ hg -R 'a repo' commit -A -m "test"
300 adding test
320 adding test
301 $ hg -R 'a repo' tag tag
321 $ hg -R 'a repo' tag tag
302 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
322 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
303 73649e48688a
323 73649e48688a
304
324
305 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
325 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
306 abort: unknown revision 'noNoNO'!
326 abort: unknown revision 'noNoNO'!
307 [255]
327 [255]
308
328
309 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
329 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
310
330
311 $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
331 $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
312 destination directory: a repo
332 destination directory: a repo
313 abort: destination 'a repo' is not empty
333 abort: destination 'a repo' is not empty
314 [255]
334 [255]
315
335
316 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
336 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
317 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
337 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
318 parameters:
338 parameters:
319
339
320 $ cat > ssh.sh << EOF
340 $ cat > ssh.sh << EOF
321 > userhost="\$1"
341 > userhost="\$1"
322 > SSH_ORIGINAL_COMMAND="\$2"
342 > SSH_ORIGINAL_COMMAND="\$2"
323 > export SSH_ORIGINAL_COMMAND
343 > export SSH_ORIGINAL_COMMAND
324 > PYTHONPATH="$PYTHONPATH"
344 > PYTHONPATH="$PYTHONPATH"
325 > export PYTHONPATH
345 > export PYTHONPATH
326 > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
346 > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
327 > EOF
347 > EOF
328
348
329 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
349 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
330 73649e48688a
350 73649e48688a
331
351
332 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
352 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
333 remote: Illegal repository "$TESTTMP/a'repo" (glob)
353 remote: Illegal repository "$TESTTMP/a'repo" (glob)
334 abort: no suitable response from remote hg!
354 abort: no suitable response from remote hg!
335 [255]
355 [255]
336
356
337 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
357 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
338 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
358 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
339 abort: no suitable response from remote hg!
359 abort: no suitable response from remote hg!
340 [255]
360 [255]
341
361
342 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh"
362 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh"
343 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
363 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
344 [255]
364 [255]
345
365
346 Test hg-ssh in read-only mode:
366 Test hg-ssh in read-only mode:
347
367
348 $ cat > ssh.sh << EOF
368 $ cat > ssh.sh << EOF
349 > userhost="\$1"
369 > userhost="\$1"
350 > SSH_ORIGINAL_COMMAND="\$2"
370 > SSH_ORIGINAL_COMMAND="\$2"
351 > export SSH_ORIGINAL_COMMAND
371 > export SSH_ORIGINAL_COMMAND
352 > PYTHONPATH="$PYTHONPATH"
372 > PYTHONPATH="$PYTHONPATH"
353 > export PYTHONPATH
373 > export PYTHONPATH
354 > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
374 > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
355 > EOF
375 > EOF
356
376
357 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
377 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
358 requesting all changes
378 requesting all changes
359 adding changesets
379 adding changesets
360 adding manifests
380 adding manifests
361 adding file changes
381 adding file changes
362 added 4 changesets with 5 changes to 4 files (+1 heads)
382 added 6 changesets with 5 changes to 4 files (+1 heads)
363 updating to branch default
383 updating to branch default
364 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
384 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
365
385
366 $ cd read-only-local
386 $ cd read-only-local
367 $ echo "baz" > bar
387 $ echo "baz" > bar
368 $ hg ci -A -m "unpushable commit" bar
388 $ hg ci -A -m "unpushable commit" bar
369 $ hg push --ssh "sh ../ssh.sh"
389 $ hg push --ssh "sh ../ssh.sh"
370 pushing to ssh://user@dummy/*/remote (glob)
390 pushing to ssh://user@dummy/*/remote (glob)
371 searching for changes
391 searching for changes
372 remote: Permission denied
392 remote: Permission denied
373 remote: abort: prechangegroup.hg-ssh hook failed
393 remote: abort: prechangegroup.hg-ssh hook failed
374 remote: Permission denied
394 remote: Permission denied
375 remote: abort: prepushkey.hg-ssh hook failed
395 remote: abort: prepushkey.hg-ssh hook failed
376 abort: unexpected response: empty string
396 abort: unexpected response: empty string
377 [255]
397 [255]
378
398
379 $ cd ..
399 $ cd ..
380
400
381 stderr from remote commands should be printed before stdout from local code (issue4336)
401 stderr from remote commands should be printed before stdout from local code (issue4336)
382
402
383 $ hg clone remote stderr-ordering
403 $ hg clone remote stderr-ordering
384 updating to branch default
404 updating to branch default
385 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
405 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
386 $ cd stderr-ordering
406 $ cd stderr-ordering
387 $ cat >> localwrite.py << EOF
407 $ cat >> localwrite.py << EOF
388 > from mercurial import exchange, extensions
408 > from mercurial import exchange, extensions
389 >
409 >
390 > def wrappedpush(orig, repo, *args, **kwargs):
410 > def wrappedpush(orig, repo, *args, **kwargs):
391 > res = orig(repo, *args, **kwargs)
411 > res = orig(repo, *args, **kwargs)
392 > repo.ui.write('local stdout\n')
412 > repo.ui.write('local stdout\n')
393 > return res
413 > return res
394 >
414 >
395 > def extsetup(ui):
415 > def extsetup(ui):
396 > extensions.wrapfunction(exchange, 'push', wrappedpush)
416 > extensions.wrapfunction(exchange, 'push', wrappedpush)
397 > EOF
417 > EOF
398
418
399 $ cat >> .hg/hgrc << EOF
419 $ cat >> .hg/hgrc << EOF
400 > [paths]
420 > [paths]
401 > default-push = ssh://user@dummy/remote
421 > default-push = ssh://user@dummy/remote
402 > [ui]
422 > [ui]
403 > ssh = python "$TESTDIR/dummyssh"
423 > ssh = python "$TESTDIR/dummyssh"
404 > [extensions]
424 > [extensions]
405 > localwrite = localwrite.py
425 > localwrite = localwrite.py
406 > EOF
426 > EOF
407
427
408 $ echo localwrite > foo
428 $ echo localwrite > foo
409 $ hg commit -m 'testing localwrite'
429 $ hg commit -m 'testing localwrite'
410 $ hg push
430 $ hg push
411 pushing to ssh://user@dummy/remote
431 pushing to ssh://user@dummy/remote
412 searching for changes
432 searching for changes
413 remote: adding changesets
433 remote: adding changesets
414 remote: adding manifests
434 remote: adding manifests
415 remote: adding file changes
435 remote: adding file changes
416 remote: added 1 changesets with 1 changes to 1 files
436 remote: added 1 changesets with 1 changes to 1 files
417 remote: KABOOM
437 remote: KABOOM
418 local stdout
438 local stdout
419
439
420 $ cd ..
440 $ cd ..
421
441
422 $ cat dummylog
442 $ cat dummylog
423 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
443 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
424 Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
444 Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
425 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
445 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
426 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
446 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
427 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
447 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
428 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
448 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
429 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
449 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
430 Got arguments 1:user@dummy 2:hg -R local serve --stdio
450 Got arguments 1:user@dummy 2:hg -R local serve --stdio
431 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
451 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
432 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
452 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
433 changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
453 changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
434 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
454 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
435 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
455 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
436 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
456 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
437 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
457 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
438 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
458 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
439 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
459 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
440 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
460 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
441 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
461 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
442 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
462 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
443 changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
463 changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
444 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
464 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
445 Got arguments 1:user@dummy 2:hg init 'a repo'
465 Got arguments 1:user@dummy 2:hg init 'a repo'
446 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
466 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
447 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
467 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
448 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
468 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
449 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
469 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
450 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
470 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
451 changegroup-in-remote hook: HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
471 changegroup-in-remote hook: HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
General Comments 0
You need to be logged in to leave comments. Login now