clone: fix copying bookmarks in uncompressed clones (issue4430)...
Durham Goode
r23116:2dc6b791 stable
@@ -1,1792 +1,1793 @@
# localrepo.py - read/write repository class for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import hex, nullid, short
from i18n import _
import urllib
import peer, changegroup, subrepo, pushkey, obsolete, repoview
import changelog, dirstate, filelog, manifest, context, bookmarks, phases
import lock as lockmod
import transaction, store, encoding, exchange, bundle2
import scmutil, util, extensions, hook, error, revset
import match as matchmod
import merge as mergemod
import tags as tagsmod
from lock import release
import weakref, errno, os, time, inspect
import branchmap, pathutil
propertycache = util.propertycache
filecache = scmutil.filecache

class repofilecache(filecache):
    """All filecache usage on repo are done for logic that should be unfiltered
    """

    def __get__(self, repo, type=None):
        return super(repofilecache, self).__get__(repo.unfiltered(), type)
    def __set__(self, repo, value):
        return super(repofilecache, self).__set__(repo.unfiltered(), value)
    def __delete__(self, repo):
        return super(repofilecache, self).__delete__(repo.unfiltered())

class storecache(repofilecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        return obj.sjoin(fname)

class unfilteredpropertycache(propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            return super(unfilteredpropertycache, self).__get__(unfi)
        return getattr(unfi, self.name)

class filteredpropertycache(propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        object.__setattr__(obj, self.name, value)


def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    return name in vars(repo.unfiltered())

def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""
    def wrapper(repo, *args, **kwargs):
        return orig(repo.unfiltered(), *args, **kwargs)
    return wrapper

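# Illustrative sketch (editor's note, not part of the original file): the
# cache helpers above are used further down in this module on
# localrepository, for example:
#
#     @repofilecache('bookmarks')
#     def _bookmarks(self):
#         return bookmarks.bmstore(self)
#
#     @storecache('00changelog.i')
#     def changelog(self):
#         ...
#
# so that cached values always live on (and are invalidated against) the
# unfiltered repository, regardless of which filtered view is in use.
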
moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
                  'unbundle'))
legacycaps = moderncaps.union(set(['changegroupsubset']))

class localpeer(peer.peerrepository):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=moderncaps):
        peer.peerrepository.__init__(self)
        self._repo = repo.filtered('served')
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)
        self.requirements = repo.requirements
        self.supportedformats = repo.supportedformats

    def close(self):
        self._repo.close()

    def _capabilities(self):
        return self._caps

    def local(self):
        return self._repo

    def canpush(self):
        return True

    def url(self):
        return self._repo.url()

    def lookup(self, key):
        return self._repo.lookup(key)

    def branchmap(self):
        return self._repo.branchmap()

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def getbundle(self, source, heads=None, common=None, bundlecaps=None,
                  format='HG10', **kwargs):
        cg = exchange.getbundle(self._repo, source, heads=heads,
                                common=common, bundlecaps=bundlecaps, **kwargs)
        if bundlecaps is not None and 'HG2Y' in bundlecaps:
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            cg = bundle2.unbundle20(self.ui, cg)
        return cg

    # TODO We might want to move the next two calls into legacypeer and add
    # unbundle instead.

    def unbundle(self, cg, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            cg = exchange.readbundle(self.ui, cg, None)
            ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
            if util.safehasattr(ret, 'getchunks'):
                # This is a bundle20 object, turn it into an unbundler.
                # This little dance should be dropped eventually when the API
                # is finally improved.
                stream = util.chunkbuffer(ret.getchunks())
                ret = bundle2.unbundle20(self.ui, stream)
            return ret
        except error.PushRaced, exc:
            raise error.ResponseError(_('push failed:'), str(exc))

    def lock(self):
        return self._repo.lock()

    def addchangegroup(self, cg, source, url):
        return changegroup.addchangegroup(self._repo, cg, source, url)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        '''used to test argument passing over the wire'''
        return "%s %s %s %s %s" % (one, two, three, four, five)

class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        localpeer.__init__(self, repo, caps=legacycaps)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def between(self, pairs):
        return self._repo.between(pairs)

    def changegroup(self, basenodes, source):
        return changegroup.changegroup(self._repo, basenodes, source)

    def changegroupsubset(self, bases, heads, source):
        return changegroup.changegroupsubset(self._repo, bases, heads, source)

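# Illustrative sketch (editor's note, not part of the original file): callers
# normally reach these peer classes through localrepository.peer(), defined
# further below, e.g. roughly:
#
#     peer = repo.peer()        # localpeer wrapping a 'served' view of repo
#     if peer.canpush():
#         heads = peer.heads()
#
# locallegacypeer, per its docstring, is mainly constructed directly by tests
# that need the restricted, pre-getbundle capability set.
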
class localrepository(object):

    supportedformats = set(('revlogv1', 'generaldelta'))
    _basesupported = supportedformats | set(('store', 'fncache', 'shared',
                                             'dotencode'))
    openerreqs = set(('revlogv1', 'generaldelta'))
    requirements = ['revlogv1']
    filtername = None

    # a list of (ui, featureset) functions.
    # only functions defined in module of enabled extensions are invoked
    featuresetupfuncs = set()

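    # Illustrative sketch (editor's note, not part of the original file): an
    # extension module would typically register one of these (ui, featureset)
    # functions so the repository accepts an extra requirement, roughly:
    #
    #     def featuresetup(ui, supported):
    #         supported.add('myfeature')    # hypothetical requirement name
    #     localrepository.featuresetupfuncs.add(featuresetup)
    #
    # Per the check in __init__ below, the function is only invoked when its
    # defining module is among the enabled extensions.
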
    def _baserequirements(self, create):
        return self.requirements[:]

    def __init__(self, baseui, path=None, create=False):
        self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
        self.wopener = self.wvfs
        self.root = self.wvfs.base
        self.path = self.wvfs.join(".hg")
        self.origroot = path
        self.auditor = pathutil.pathauditor(self.root, self._checknested)
        self.vfs = scmutil.vfs(self.path)
        self.opener = self.vfs
        self.baseui = baseui
        self.ui = baseui.copy()
        self.ui.copy = baseui.copy # prevent copying repo configuration
        # A list of callback to shape the phase if no data were found.
        # Callback are in the form: func(repo, roots) --> processed root.
        # This list it to be filled by extension during repo setup
        self._phasedefaults = []
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            pass

        if self.featuresetupfuncs:
            self.supported = set(self._basesupported) # use private copy
            extmods = set(m.__name__ for n, m
                          in extensions.extensions(self.ui))
            for setupfunc in self.featuresetupfuncs:
                if setupfunc.__module__ in extmods:
                    setupfunc(self.ui, self.supported)
        else:
            self.supported = self._basesupported

        if not self.vfs.isdir():
            if create:
                if not self.wvfs.exists():
                    self.wvfs.makedirs()
                self.vfs.makedir(notindexed=True)
                requirements = self._baserequirements(create)
                if self.ui.configbool('format', 'usestore', True):
                    self.vfs.mkdir("store")
                    requirements.append("store")
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                    # create an invalid changelog
                    self.vfs.append(
                        "00changelog.i",
                        '\0\0\0\2' # represents revlogv2
                        ' dummy changelog to prevent using the old repo layout'
                    )
                if self.ui.configbool('format', 'generaldelta', False):
                    requirements.append("generaldelta")
                requirements = set(requirements)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            try:
                requirements = scmutil.readrequires(self.vfs, self.supported)
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                requirements = set()

        self.sharedpath = self.path
        try:
            vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
                              realpath=True)
            s = vfs.base
            if not vfs.exists():
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
        self.spath = self.store.path
        self.svfs = self.store.vfs
        self.sopener = self.svfs
        self.sjoin = self.store.join
        self.vfs.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()


        self._branchcaches = {}
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}

        # hold sets of revision to be filtered
        # should be cleared when something might have changed the filter value:
        # - new changesets,
        # - phase change,
        # - new obsolescence marker,
        # - working directory parent change,
        # - bookmark changes
        self.filteredrevcache = {}

    def close(self):
        pass

    def _restrictcapabilities(self, caps):
        # bundle2 is not ready for prime time, drop it unless explicitly
        # required by the tests (or some brave tester)
        if self.ui.configbool('experimental', 'bundle2-exp', False):
            caps = set(caps)
            capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
            caps.add('bundle2-exp=' + urllib.quote(capsblob))
        return caps

    def _applyrequirements(self, requirements):
        self.requirements = requirements
        self.sopener.options = dict((r, 1) for r in requirements
                                    if r in self.openerreqs)
        chunkcachesize = self.ui.configint('format', 'chunkcachesize')
        if chunkcachesize is not None:
            self.sopener.options['chunkcachesize'] = chunkcachesize

    def _writerequirements(self):
        reqfile = self.opener("requires", "w")
        for r in sorted(self.requirements):
            reqfile.write("%s\n" % r)
        reqfile.close()

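    # Illustrative sketch (editor's note, not part of the original file): with
    # the default 'format' options used during creation above, a freshly
    # initialized repository ends up with a .hg/requires file listing one
    # requirement per sorted line, e.g.:
    #
    #     dotencode
    #     fncache
    #     revlogv1
    #     store
    #
    # and setting "[format] generaldelta = True" in hgrc before 'hg init'
    # would add a 'generaldelta' line as well.
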
    def _checknested(self, path):
        """Determine if path is a legal nested repository."""
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    return True
                else:
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                parts.pop()
        return False

    def peer(self):
        return localpeer(self) # not cached to avoid reference cycle

    def unfiltered(self):
        """Return unfiltered version of the repository

        Intended to be overwritten by filtered repo."""
        return self

    def filtered(self, name):
        """Return a filtered version of a repository"""
        # build a new class with the mixin and the current class
        # (possibly subclass of the repo)
        class proxycls(repoview.repoview, self.unfiltered().__class__):
            pass
        return proxycls(self, name)

    @repofilecache('bookmarks')
    def _bookmarks(self):
        return bookmarks.bmstore(self)

    @repofilecache('bookmarks.current')
    def _bookmarkcurrent(self):
        return bookmarks.readcurrent(self)

    def bookmarkheads(self, bookmark):
        name = bookmark.split('@', 1)[0]
        heads = []
        for mark, n in self._bookmarks.iteritems():
            if mark.split('@', 1)[0] == name:
                heads.append(n)
        return heads

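    # Illustrative sketch (editor's note, not part of the original file):
    # filtered() is how this module obtains restricted views of the
    # repository, e.g. localpeer above wraps repo.filtered('served') and
    # cancopy() below consults repo.filtered('visible'). Roughly:
    #
    #     visible = repo.filtered('visible')   # hides hidden changesets
    #     served = repo.filtered('served')     # also hides secret changesets
    #
    # The exact semantics of each filter name live in the repoview module.
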
    @storecache('phaseroots')
    def _phasecache(self):
        return phases.phasecache(self, self._phasedefaults)

    @storecache('obsstore')
    def obsstore(self):
        # read default format for new obsstore.
        defaultformat = self.ui.configint('format', 'obsstore-version', None)
        # rely on obsstore class default when possible.
        kwargs = {}
        if defaultformat is not None:
            kwargs['defaultformat'] = defaultformat
        readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
        store = obsolete.obsstore(self.sopener, readonly=readonly,
                                  **kwargs)
        if store and readonly:
            # message is rare enough to not be translated
            msg = 'obsolete feature not enabled but %i markers found!\n'
            self.ui.warn(msg % len(list(store)))
        return store

    @storecache('00changelog.i')
    def changelog(self):
        c = changelog.changelog(self.sopener)
        if 'HG_PENDING' in os.environ:
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        return c

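    # Note (editor's comment, not in the original file): the HG_PENDING check
    # above lets hook processes spawned during a transaction see changesets
    # that have been written but not yet committed to 00changelog.i; the
    # pending data in 00changelog.i.a is only consulted when the environment
    # variable points at this repository's root.
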
    @storecache('00manifest.i')
    def manifest(self):
        return manifest.manifest(self.sopener)

    @repofilecache('dirstate')
    def dirstate(self):
        warned = [0]
        def validate(node):
            try:
                self.changelog.rev(node)
                return node
            except error.LookupError:
                if not warned[0]:
                    warned[0] = True
                    self.ui.warn(_("warning: ignoring unknown"
                                   " working parent %s!\n") % short(node))
                return nullid

        return dirstate.dirstate(self.opener, self.ui, self.root, validate)

    def __getitem__(self, changeid):
        if changeid is None:
            return context.workingctx(self)
        return context.changectx(self, changeid)

    def __contains__(self, changeid):
        try:
            return bool(self.lookup(changeid))
        except error.RepoLookupError:
            return False

    def __nonzero__(self):
        return True

    def __len__(self):
        return len(self.changelog)

    def __iter__(self):
        return iter(self.changelog)

    def revs(self, expr, *args):
        '''Return a list of revisions matching the given revset'''
        expr = revset.formatspec(expr, *args)
        m = revset.match(None, expr)
        return m(self, revset.spanset(self))

    def set(self, expr, *args):
        '''
        Yield a context for each matching revision, after doing arg
        replacement via revset.formatspec
        '''
        for r in self.revs(expr, *args):
            yield self[r]

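    # Illustrative sketch (editor's note, not part of the original file):
    # typical callers use these two helpers with formatspec-style argument
    # substitution, e.g. something like:
    #
    #     revs = repo.revs('%d:: and not secret()', somerev)  # hypothetical
    #     for ctx in repo.set('parents(%n)', somenode):
    #         print ctx.hex()
    #
    # where %d expects a revision number and %n a binary node, per
    # revset.formatspec.
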
    def url(self):
        return 'file:' + self.root

    def hook(self, name, throw=False, **args):
        """Call a hook, passing this repo instance.

        This a convenience method to aid invoking hooks. Extensions likely
        won't call this unless they have registered a custom hook or are
        replacing code that is expected to call a hook.
        """
        return hook.hook(self.ui, self, name, throw, **args)

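    # Illustrative sketch (editor's note, not part of the original file): the
    # hook names fired below ('pretag', 'tag') are configured by the user in
    # hgrc, for example:
    #
    #     [hooks]
    #     pretag = python:myhooks.checktag    # hypothetical hook
    #
    # A hook invoked with throw=True aborts the surrounding operation when it
    # reports failure.
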
    @unfilteredmethod
    def _tag(self, names, node, message, local, user, date, extra={},
             editor=False):
        if isinstance(names, str):
            names = (names,)

        branches = self.branchmap()
        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if (self._tagscache.tagtypes and
                    name in self._tagscache.tagtypes):
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        self.invalidatecaches()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m,
                              editor=editor)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode

    def tag(self, names, node, message, local, user, date, editor=False):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            m = matchmod.exact(self.root, '', ['.hgtags'])
            if util.any(self.status(match=m, unknown=True, ignored=True)):
                raise util.Abort(_('working copy of .hgtags is changed'),
                                 hint=_('please commit .hgtags manually'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date, editor=editor)

    @filteredpropertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache

    def tags(self):
        '''return a mapping of tag to node'''
        t = {}
        if self.changelog.filteredrevs:
            tags, tt = self._findtags()
        else:
            tags = self._tagscache.tags
        for k, v in tags.iteritems():
            try:
                # ignore tags to unknown nodes
                self.changelog.rev(v)
                t[k] = v
            except (error.LookupError, ValueError):
                pass
        return t

    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        alltags = {} # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)

    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        if not self._tagscache.tagslist:
            l = []
            for t, n in self.tags().iteritems():
                l.append((self.changelog.rev(n), t, n))
            self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]

        return self._tagscache.tagslist

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self._tagscache.nodetagscache:
            nodetagscache = {}
            for t, n in self._tagscache.tags.iteritems():
                nodetagscache.setdefault(n, []).append(t)
            for tags in nodetagscache.itervalues():
                tags.sort()
            self._tagscache.nodetagscache = nodetagscache
        return self._tagscache.nodetagscache.get(node, [])

    def nodebookmarks(self, node):
        marks = []
        for bookmark, n in self._bookmarks.iteritems():
            if n == node:
                marks.append(bookmark)
        return sorted(marks)

    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]} with branchheads
        ordered by increasing revision number'''
        branchmap.updatecache(self)
        return self._branchcaches[self.filtername]

    def branchtip(self, branch):
        '''return the tip node for a given branch'''
        try:
            return self.branchmap().branchtip(branch)
        except KeyError:
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)

    def lookup(self, key):
        return self[key].node()

    def lookupbranch(self, key, remote=None):
        repo = remote or self
        if key in repo.branchmap():
            return key

        repo = (remote and remote.local()) and remote or self
        return repo[key].branch()

    def known(self, nodes):
        nm = self.changelog.nodemap
        pc = self._phasecache
        result = []
        for n in nodes:
            r = nm.get(n)
            resp = not (r is None or pc.phase(self, r) >= phases.secret)
            result.append(resp)
        return result

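    # Note (editor's comment, not in the original file): known() answers the
    # discovery question "which of these nodes do you have?" with one boolean
    # per input node; nodes that are missing locally or sit in the secret
    # phase are reported as unknown, so secret changesets are not advertised
    # to peers.
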
    def local(self):
        return self

    def cancopy(self):
        # so statichttprepo's override of local() works
        if not self.local():
            return False
        if not self.ui.configbool('phases', 'publish', True):
            return True
        # if publishing we can't copy if there is filtered content
        return not self.filtered('visible').changelog.filteredrevs

    def join(self, f, *insidef):
        return os.path.join(self.path, f, *insidef)

    def wjoin(self, f, *insidef):
        return os.path.join(self.root, f, *insidef)

    def file(self, f):
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)

    def changectx(self, changeid):
        return self[changeid]

    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()

    def setparents(self, p1, p2=nullid):
        self.dirstate.beginparentchange()
        copies = self.dirstate.setparents(p1, p2)
        pctx = self[p1]
        if copies:
            # Adjust copy records, the dirstate cannot do it, it
            # requires access to parents manifests. Preserve them
            # only for entries added to first parent.
            for f in copies:
                if f not in pctx and copies[f] in pctx:
                    self.dirstate.copy(copies[f], f)
        if p2 == nullid:
            for f, s in sorted(self.dirstate.copies().items()):
                if f not in pctx and s not in pctx:
                    self.dirstate.copy(None, f)
        self.dirstate.endparentchange()

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        return self.dirstate.getcwd()

    def pathto(self, f, cwd=None):
        return self.dirstate.pathto(f, cwd)

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def _link(self, f):
        return self.wvfs.islink(f)

    def _loadfilter(self, filter):
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]

    def _filter(self, filterpats, filename, data):
        for mf, fn, cmd in filterpats:
            if mf(filename):
                self.ui.debug("filtering %s through %s\n" % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data

    @unfilteredpropertycache
    def _encodefilterpats(self):
        return self._loadfilter('encode')

    @unfilteredpropertycache
    def _decodefilterpats(self):
        return self._loadfilter('decode')

    def adddatafilter(self, name, filter):
        self._datafilters[name] = filter

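    # Illustrative sketch (editor's note, not part of the original file): the
    # 'encode' and 'decode' sections read by _loadfilter above come from the
    # user's hgrc, e.g. something like:
    #
    #     [encode]
    #     *.txt = dos2unix        # hypothetical command filter
    #     [decode]
    #     *.txt = unix2dos
    #
    # A pattern mapped to '!' is skipped entirely, and filters registered via
    # adddatafilter() are matched against the command by name prefix.
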
    def wread(self, filename):
        if self._link(filename):
            data = self.wvfs.readlink(filename)
        else:
            data = self.wopener.read(filename)
        return self._filter(self._encodefilterpats, filename, data)

    def wwrite(self, filename, data, flags):
        data = self._filter(self._decodefilterpats, filename, data)
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            self.wopener.write(filename, data)
            if 'x' in flags:
                self.wvfs.setflags(filename, False, True)

    def wwritedata(self, filename, data):
        return self._filter(self._decodefilterpats, filename, data)

    def transaction(self, desc, report=None):
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            return tr.nest()

        # abort here if the journal already exists
        if self.svfs.exists("journal"):
            raise error.RepoError(
                _("abandoned transaction found"),
                hint=_("run 'hg recover' to clean up transaction"))

        def onclose():
            self.store.write(self._transref())

        self._writejournal(desc)
        renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
        rp = report and report or self.ui.warn
        tr = transaction.transaction(rp, self.sopener,
                                     "journal",
                                     aftertrans(renames),
                                     self.store.createmode,
                                     onclose)
        self._transref = weakref.ref(tr)
        return tr

    def _journalfiles(self):
        return ((self.svfs, 'journal'),
                (self.vfs, 'journal.dirstate'),
                (self.vfs, 'journal.branch'),
                (self.vfs, 'journal.desc'),
                (self.vfs, 'journal.bookmarks'),
                (self.svfs, 'journal.phaseroots'))

    def undofiles(self):
        return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]

901 def _writejournal(self, desc):
901 def _writejournal(self, desc):
902 self.opener.write("journal.dirstate",
902 self.opener.write("journal.dirstate",
903 self.opener.tryread("dirstate"))
903 self.opener.tryread("dirstate"))
904 self.opener.write("journal.branch",
904 self.opener.write("journal.branch",
905 encoding.fromlocal(self.dirstate.branch()))
905 encoding.fromlocal(self.dirstate.branch()))
906 self.opener.write("journal.desc",
906 self.opener.write("journal.desc",
907 "%d\n%s\n" % (len(self), desc))
907 "%d\n%s\n" % (len(self), desc))
908 self.opener.write("journal.bookmarks",
908 self.opener.write("journal.bookmarks",
909 self.opener.tryread("bookmarks"))
909 self.opener.tryread("bookmarks"))
910 self.sopener.write("journal.phaseroots",
910 self.sopener.write("journal.phaseroots",
911 self.sopener.tryread("phaseroots"))
911 self.sopener.tryread("phaseroots"))
912
912
913 def recover(self):
913 def recover(self):
914 lock = self.lock()
914 lock = self.lock()
915 try:
915 try:
916 if self.svfs.exists("journal"):
916 if self.svfs.exists("journal"):
917 self.ui.status(_("rolling back interrupted transaction\n"))
917 self.ui.status(_("rolling back interrupted transaction\n"))
918 transaction.rollback(self.sopener, "journal",
918 transaction.rollback(self.sopener, "journal",
919 self.ui.warn)
919 self.ui.warn)
920 self.invalidate()
920 self.invalidate()
921 return True
921 return True
922 else:
922 else:
923 self.ui.warn(_("no interrupted transaction available\n"))
923 self.ui.warn(_("no interrupted transaction available\n"))
924 return False
924 return False
925 finally:
925 finally:
926 lock.release()
926 lock.release()
927
927
928 def rollback(self, dryrun=False, force=False):
928 def rollback(self, dryrun=False, force=False):
929 wlock = lock = None
929 wlock = lock = None
930 try:
930 try:
931 wlock = self.wlock()
931 wlock = self.wlock()
932 lock = self.lock()
932 lock = self.lock()
933 if self.svfs.exists("undo"):
933 if self.svfs.exists("undo"):
934 return self._rollback(dryrun, force)
934 return self._rollback(dryrun, force)
935 else:
935 else:
936 self.ui.warn(_("no rollback information available\n"))
936 self.ui.warn(_("no rollback information available\n"))
937 return 1
937 return 1
938 finally:
938 finally:
939 release(lock, wlock)
939 release(lock, wlock)
940
940
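recover() rolls back an interrupted transaction left behind by a crash, while rollback() undoes the last completed one; both return conventional status values. A hedged sketch (the path is an assumption):

    # Sketch only: inspect and undo the most recent transaction.
    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    recovered = repo.recover()      # True if an abandoned journal was rolled back
    repo.rollback(dryrun=True)      # reports what a real rollback would undo; returns 0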
941 @unfilteredmethod # Until we get smarter cache management
941 @unfilteredmethod # Until we get smarter cache management
942 def _rollback(self, dryrun, force):
942 def _rollback(self, dryrun, force):
943 ui = self.ui
943 ui = self.ui
944 try:
944 try:
945 args = self.opener.read('undo.desc').splitlines()
945 args = self.opener.read('undo.desc').splitlines()
946 (oldlen, desc, detail) = (int(args[0]), args[1], None)
946 (oldlen, desc, detail) = (int(args[0]), args[1], None)
947 if len(args) >= 3:
947 if len(args) >= 3:
948 detail = args[2]
948 detail = args[2]
949 oldtip = oldlen - 1
949 oldtip = oldlen - 1
950
950
951 if detail and ui.verbose:
951 if detail and ui.verbose:
952 msg = (_('repository tip rolled back to revision %s'
952 msg = (_('repository tip rolled back to revision %s'
953 ' (undo %s: %s)\n')
953 ' (undo %s: %s)\n')
954 % (oldtip, desc, detail))
954 % (oldtip, desc, detail))
955 else:
955 else:
956 msg = (_('repository tip rolled back to revision %s'
956 msg = (_('repository tip rolled back to revision %s'
957 ' (undo %s)\n')
957 ' (undo %s)\n')
958 % (oldtip, desc))
958 % (oldtip, desc))
959 except IOError:
959 except IOError:
960 msg = _('rolling back unknown transaction\n')
960 msg = _('rolling back unknown transaction\n')
961 desc = None
961 desc = None
962
962
963 if not force and self['.'] != self['tip'] and desc == 'commit':
963 if not force and self['.'] != self['tip'] and desc == 'commit':
964 raise util.Abort(
964 raise util.Abort(
965 _('rollback of last commit while not checked out '
965 _('rollback of last commit while not checked out '
966 'may lose data'), hint=_('use -f to force'))
966 'may lose data'), hint=_('use -f to force'))
967
967
968 ui.status(msg)
968 ui.status(msg)
969 if dryrun:
969 if dryrun:
970 return 0
970 return 0
971
971
972 parents = self.dirstate.parents()
972 parents = self.dirstate.parents()
973 self.destroying()
973 self.destroying()
974 transaction.rollback(self.sopener, 'undo', ui.warn)
974 transaction.rollback(self.sopener, 'undo', ui.warn)
975 if self.vfs.exists('undo.bookmarks'):
975 if self.vfs.exists('undo.bookmarks'):
976 self.vfs.rename('undo.bookmarks', 'bookmarks')
976 self.vfs.rename('undo.bookmarks', 'bookmarks')
977 if self.svfs.exists('undo.phaseroots'):
977 if self.svfs.exists('undo.phaseroots'):
978 self.svfs.rename('undo.phaseroots', 'phaseroots')
978 self.svfs.rename('undo.phaseroots', 'phaseroots')
979 self.invalidate()
979 self.invalidate()
980
980
981 parentgone = (parents[0] not in self.changelog.nodemap or
981 parentgone = (parents[0] not in self.changelog.nodemap or
982 parents[1] not in self.changelog.nodemap)
982 parents[1] not in self.changelog.nodemap)
983 if parentgone:
983 if parentgone:
984 self.vfs.rename('undo.dirstate', 'dirstate')
984 self.vfs.rename('undo.dirstate', 'dirstate')
985 try:
985 try:
986 branch = self.opener.read('undo.branch')
986 branch = self.opener.read('undo.branch')
987 self.dirstate.setbranch(encoding.tolocal(branch))
987 self.dirstate.setbranch(encoding.tolocal(branch))
988 except IOError:
988 except IOError:
989 ui.warn(_('named branch could not be reset: '
989 ui.warn(_('named branch could not be reset: '
990 'current branch is still \'%s\'\n')
990 'current branch is still \'%s\'\n')
991 % self.dirstate.branch())
991 % self.dirstate.branch())
992
992
993 self.dirstate.invalidate()
993 self.dirstate.invalidate()
994 parents = tuple([p.rev() for p in self.parents()])
994 parents = tuple([p.rev() for p in self.parents()])
995 if len(parents) > 1:
995 if len(parents) > 1:
996 ui.status(_('working directory now based on '
996 ui.status(_('working directory now based on '
997 'revisions %d and %d\n') % parents)
997 'revisions %d and %d\n') % parents)
998 else:
998 else:
999 ui.status(_('working directory now based on '
999 ui.status(_('working directory now based on '
1000 'revision %d\n') % parents)
1000 'revision %d\n') % parents)
1001 # TODO: if we know which new heads may result from this rollback, pass
1001 # TODO: if we know which new heads may result from this rollback, pass
1002 # them to destroy(), which will prevent the branchhead cache from being
1002 # them to destroy(), which will prevent the branchhead cache from being
1003 # invalidated.
1003 # invalidated.
1004 self.destroyed()
1004 self.destroyed()
1005 return 0
1005 return 0
1006
1006
1007 def invalidatecaches(self):
1007 def invalidatecaches(self):
1008
1008
1009 if '_tagscache' in vars(self):
1009 if '_tagscache' in vars(self):
1010 # can't use delattr on proxy
1010 # can't use delattr on proxy
1011 del self.__dict__['_tagscache']
1011 del self.__dict__['_tagscache']
1012
1012
1013 self.unfiltered()._branchcaches.clear()
1013 self.unfiltered()._branchcaches.clear()
1014 self.invalidatevolatilesets()
1014 self.invalidatevolatilesets()
1015
1015
1016 def invalidatevolatilesets(self):
1016 def invalidatevolatilesets(self):
1017 self.filteredrevcache.clear()
1017 self.filteredrevcache.clear()
1018 obsolete.clearobscaches(self)
1018 obsolete.clearobscaches(self)
1019
1019
1020 def invalidatedirstate(self):
1020 def invalidatedirstate(self):
1021 '''Invalidates the dirstate, causing the next call to dirstate
1021 '''Invalidates the dirstate, causing the next call to dirstate
1022 to check if it was modified since the last time it was read,
1022 to check if it was modified since the last time it was read,
1023 rereading it if it has.
1023 rereading it if it has.
1024
1024
1025 This differs from dirstate.invalidate() in that it does not always
1025 This differs from dirstate.invalidate() in that it does not always
1026 reread the dirstate. Use dirstate.invalidate() if you want to
1026 reread the dirstate. Use dirstate.invalidate() if you want to
1027 explicitly read the dirstate again (i.e. restoring it to a previous
1027 explicitly read the dirstate again (i.e. restoring it to a previous
1028 known good state).'''
1028 known good state).'''
1029 if hasunfilteredcache(self, 'dirstate'):
1029 if hasunfilteredcache(self, 'dirstate'):
1030 for k in self.dirstate._filecache:
1030 for k in self.dirstate._filecache:
1031 try:
1031 try:
1032 delattr(self.dirstate, k)
1032 delattr(self.dirstate, k)
1033 except AttributeError:
1033 except AttributeError:
1034 pass
1034 pass
1035 delattr(self.unfiltered(), 'dirstate')
1035 delattr(self.unfiltered(), 'dirstate')
1036
1036
1037 def invalidate(self):
1037 def invalidate(self):
1038 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1038 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1039 for k in self._filecache:
1039 for k in self._filecache:
1040 # dirstate is invalidated separately in invalidatedirstate()
1040 # dirstate is invalidated separately in invalidatedirstate()
1041 if k == 'dirstate':
1041 if k == 'dirstate':
1042 continue
1042 continue
1043
1043
1044 try:
1044 try:
1045 delattr(unfiltered, k)
1045 delattr(unfiltered, k)
1046 except AttributeError:
1046 except AttributeError:
1047 pass
1047 pass
1048 self.invalidatecaches()
1048 self.invalidatecaches()
1049 self.store.invalidatecaches()
1049 self.store.invalidatecaches()
1050
1050
1051 def invalidateall(self):
1051 def invalidateall(self):
1052 '''Fully invalidates both store and non-store parts, causing the
1052 '''Fully invalidates both store and non-store parts, causing the
1053 subsequent operation to reread any outside changes.'''
1053 subsequent operation to reread any outside changes.'''
1054 # extension should hook this to invalidate its caches
1054 # extension should hook this to invalidate its caches
1055 self.invalidate()
1055 self.invalidate()
1056 self.invalidatedirstate()
1056 self.invalidatedirstate()
1057
1057
1058 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1058 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1059 try:
1059 try:
1060 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1060 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1061 except error.LockHeld, inst:
1061 except error.LockHeld, inst:
1062 if not wait:
1062 if not wait:
1063 raise
1063 raise
1064 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1064 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1065 (desc, inst.locker))
1065 (desc, inst.locker))
1066 # default to 600 seconds timeout
1066 # default to 600 seconds timeout
1067 l = lockmod.lock(vfs, lockname,
1067 l = lockmod.lock(vfs, lockname,
1068 int(self.ui.config("ui", "timeout", "600")),
1068 int(self.ui.config("ui", "timeout", "600")),
1069 releasefn, desc=desc)
1069 releasefn, desc=desc)
1070 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1070 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1071 if acquirefn:
1071 if acquirefn:
1072 acquirefn()
1072 acquirefn()
1073 return l
1073 return l
1074
1074
1075 def _afterlock(self, callback):
1075 def _afterlock(self, callback):
1076 """add a callback to the current repository lock.
1076 """add a callback to the current repository lock.
1077
1077
1078 The callback will be executed on lock release."""
1078 The callback will be executed on lock release."""
1079 l = self._lockref and self._lockref()
1079 l = self._lockref and self._lockref()
1080 if l:
1080 if l:
1081 l.postrelease.append(callback)
1081 l.postrelease.append(callback)
1082 else:
1082 else:
1083 callback()
1083 callback()
1084
1084
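_afterlock() queues a callback on the currently held store lock and falls back to calling it immediately when no lock is held; commit() uses it below to fire the 'commit' hook only after the lock is gone. The sketch relies on this internal, underscore-prefixed API and an assumed repository path.

    # Sketch only: run code once the current store lock is released.
    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '/path/to/repo')

    def notify():
        repo.ui.status('store lock released\n')

    lock = repo.lock()
    try:
        repo._afterlock(notify)   # appended to the lock's postrelease list
    finally:
        lock.release()            # notify() runs here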
1085 def lock(self, wait=True):
1085 def lock(self, wait=True):
1086 '''Lock the repository store (.hg/store) and return a weak reference
1086 '''Lock the repository store (.hg/store) and return a weak reference
1087 to the lock. Use this before modifying the store (e.g. committing or
1087 to the lock. Use this before modifying the store (e.g. committing or
1088 stripping). If you are opening a transaction, get a lock as well.'''
1088 stripping). If you are opening a transaction, get a lock as well.'''
1089 l = self._lockref and self._lockref()
1089 l = self._lockref and self._lockref()
1090 if l is not None and l.held:
1090 if l is not None and l.held:
1091 l.lock()
1091 l.lock()
1092 return l
1092 return l
1093
1093
1094 def unlock():
1094 def unlock():
1095 for k, ce in self._filecache.items():
1095 for k, ce in self._filecache.items():
1096 if k == 'dirstate' or k not in self.__dict__:
1096 if k == 'dirstate' or k not in self.__dict__:
1097 continue
1097 continue
1098 ce.refresh()
1098 ce.refresh()
1099
1099
1100 l = self._lock(self.svfs, "lock", wait, unlock,
1100 l = self._lock(self.svfs, "lock", wait, unlock,
1101 self.invalidate, _('repository %s') % self.origroot)
1101 self.invalidate, _('repository %s') % self.origroot)
1102 self._lockref = weakref.ref(l)
1102 self._lockref = weakref.ref(l)
1103 return l
1103 return l
1104
1104
1105 def wlock(self, wait=True):
1105 def wlock(self, wait=True):
1106 '''Lock the non-store parts of the repository (everything under
1106 '''Lock the non-store parts of the repository (everything under
1107 .hg except .hg/store) and return a weak reference to the lock.
1107 .hg except .hg/store) and return a weak reference to the lock.
1108 Use this before modifying files in .hg.'''
1108 Use this before modifying files in .hg.'''
1109 l = self._wlockref and self._wlockref()
1109 l = self._wlockref and self._wlockref()
1110 if l is not None and l.held:
1110 if l is not None and l.held:
1111 l.lock()
1111 l.lock()
1112 return l
1112 return l
1113
1113
1114 def unlock():
1114 def unlock():
1115 if self.dirstate.pendingparentchange():
1115 if self.dirstate.pendingparentchange():
1116 self.dirstate.invalidate()
1116 self.dirstate.invalidate()
1117 else:
1117 else:
1118 self.dirstate.write()
1118 self.dirstate.write()
1119
1119
1120 self._filecache['dirstate'].refresh()
1120 self._filecache['dirstate'].refresh()
1121
1121
1122 l = self._lock(self.vfs, "wlock", wait, unlock,
1122 l = self._lock(self.vfs, "wlock", wait, unlock,
1123 self.invalidatedirstate, _('working directory of %s') %
1123 self.invalidatedirstate, _('working directory of %s') %
1124 self.origroot)
1124 self.origroot)
1125 self._wlockref = weakref.ref(l)
1125 self._wlockref = weakref.ref(l)
1126 return l
1126 return l
1127
1127
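lock() guards .hg/store while wlock() guards everything else under .hg; callers that need both take the working-copy lock first, exactly as commit() does below. A minimal sketch of that ordering (path assumed):

    # Sketch only: acquisition order when both locks are needed.
    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    wlock = repo.wlock()          # working copy first...
    lock = repo.lock()            # ...then the store
    try:
        pass                      # touch dirstate and store here
    finally:
        lock.release()            # release in reverse order
        wlock.release()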
1128 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1128 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1129 """
1129 """
1130 commit an individual file as part of a larger transaction
1130 commit an individual file as part of a larger transaction
1131 """
1131 """
1132
1132
1133 fname = fctx.path()
1133 fname = fctx.path()
1134 text = fctx.data()
1134 text = fctx.data()
1135 flog = self.file(fname)
1135 flog = self.file(fname)
1136 fparent1 = manifest1.get(fname, nullid)
1136 fparent1 = manifest1.get(fname, nullid)
1137 fparent2 = manifest2.get(fname, nullid)
1137 fparent2 = manifest2.get(fname, nullid)
1138
1138
1139 meta = {}
1139 meta = {}
1140 copy = fctx.renamed()
1140 copy = fctx.renamed()
1141 if copy and copy[0] != fname:
1141 if copy and copy[0] != fname:
1142 # Mark the new revision of this file as a copy of another
1142 # Mark the new revision of this file as a copy of another
1143 # file. This copy data will effectively act as a parent
1143 # file. This copy data will effectively act as a parent
1144 # of this new revision. If this is a merge, the first
1144 # of this new revision. If this is a merge, the first
1145 # parent will be the nullid (meaning "look up the copy data")
1145 # parent will be the nullid (meaning "look up the copy data")
1146 # and the second one will be the other parent. For example:
1146 # and the second one will be the other parent. For example:
1147 #
1147 #
1148 # 0 --- 1 --- 3 rev1 changes file foo
1148 # 0 --- 1 --- 3 rev1 changes file foo
1149 # \ / rev2 renames foo to bar and changes it
1149 # \ / rev2 renames foo to bar and changes it
1150 # \- 2 -/ rev3 should have bar with all changes and
1150 # \- 2 -/ rev3 should have bar with all changes and
1151 # should record that bar descends from
1151 # should record that bar descends from
1152 # bar in rev2 and foo in rev1
1152 # bar in rev2 and foo in rev1
1153 #
1153 #
1154 # this allows this merge to succeed:
1154 # this allows this merge to succeed:
1155 #
1155 #
1156 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1156 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1157 # \ / merging rev3 and rev4 should use bar@rev2
1157 # \ / merging rev3 and rev4 should use bar@rev2
1158 # \- 2 --- 4 as the merge base
1158 # \- 2 --- 4 as the merge base
1159 #
1159 #
1160
1160
1161 cfname = copy[0]
1161 cfname = copy[0]
1162 crev = manifest1.get(cfname)
1162 crev = manifest1.get(cfname)
1163 newfparent = fparent2
1163 newfparent = fparent2
1164
1164
1165 if manifest2: # branch merge
1165 if manifest2: # branch merge
1166 if fparent2 == nullid or crev is None: # copied on remote side
1166 if fparent2 == nullid or crev is None: # copied on remote side
1167 if cfname in manifest2:
1167 if cfname in manifest2:
1168 crev = manifest2[cfname]
1168 crev = manifest2[cfname]
1169 newfparent = fparent1
1169 newfparent = fparent1
1170
1170
1171 # find source in nearest ancestor if we've lost track
1171 # find source in nearest ancestor if we've lost track
1172 if not crev:
1172 if not crev:
1173 self.ui.debug(" %s: searching for copy revision for %s\n" %
1173 self.ui.debug(" %s: searching for copy revision for %s\n" %
1174 (fname, cfname))
1174 (fname, cfname))
1175 for ancestor in self[None].ancestors():
1175 for ancestor in self[None].ancestors():
1176 if cfname in ancestor:
1176 if cfname in ancestor:
1177 crev = ancestor[cfname].filenode()
1177 crev = ancestor[cfname].filenode()
1178 break
1178 break
1179
1179
1180 if crev:
1180 if crev:
1181 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1181 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1182 meta["copy"] = cfname
1182 meta["copy"] = cfname
1183 meta["copyrev"] = hex(crev)
1183 meta["copyrev"] = hex(crev)
1184 fparent1, fparent2 = nullid, newfparent
1184 fparent1, fparent2 = nullid, newfparent
1185 else:
1185 else:
1186 self.ui.warn(_("warning: can't find ancestor for '%s' "
1186 self.ui.warn(_("warning: can't find ancestor for '%s' "
1187 "copied from '%s'!\n") % (fname, cfname))
1187 "copied from '%s'!\n") % (fname, cfname))
1188
1188
1189 elif fparent1 == nullid:
1189 elif fparent1 == nullid:
1190 fparent1, fparent2 = fparent2, nullid
1190 fparent1, fparent2 = fparent2, nullid
1191 elif fparent2 != nullid:
1191 elif fparent2 != nullid:
1192 # is one parent an ancestor of the other?
1192 # is one parent an ancestor of the other?
1193 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1193 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1194 if fparent1 in fparentancestors:
1194 if fparent1 in fparentancestors:
1195 fparent1, fparent2 = fparent2, nullid
1195 fparent1, fparent2 = fparent2, nullid
1196 elif fparent2 in fparentancestors:
1196 elif fparent2 in fparentancestors:
1197 fparent2 = nullid
1197 fparent2 = nullid
1198
1198
1199 # is the file changed?
1199 # is the file changed?
1200 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1200 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1201 changelist.append(fname)
1201 changelist.append(fname)
1202 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1202 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1203 # are just the flags changed during merge?
1203 # are just the flags changed during merge?
1204 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1204 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1205 changelist.append(fname)
1205 changelist.append(fname)
1206
1206
1207 return fparent1
1207 return fparent1
1208
1208
1209 @unfilteredmethod
1209 @unfilteredmethod
1210 def commit(self, text="", user=None, date=None, match=None, force=False,
1210 def commit(self, text="", user=None, date=None, match=None, force=False,
1211 editor=False, extra={}):
1211 editor=False, extra={}):
1212 """Add a new revision to current repository.
1212 """Add a new revision to current repository.
1213
1213
1214 Revision information is gathered from the working directory;
1214 Revision information is gathered from the working directory;
1215 match can be used to filter the committed files. If editor is
1215 match can be used to filter the committed files. If editor is
1216 supplied, it is called to get a commit message.
1216 supplied, it is called to get a commit message.
1217 """
1217 """
1218
1218
1219 def fail(f, msg):
1219 def fail(f, msg):
1220 raise util.Abort('%s: %s' % (f, msg))
1220 raise util.Abort('%s: %s' % (f, msg))
1221
1221
1222 if not match:
1222 if not match:
1223 match = matchmod.always(self.root, '')
1223 match = matchmod.always(self.root, '')
1224
1224
1225 if not force:
1225 if not force:
1226 vdirs = []
1226 vdirs = []
1227 match.explicitdir = vdirs.append
1227 match.explicitdir = vdirs.append
1228 match.bad = fail
1228 match.bad = fail
1229
1229
1230 wlock = self.wlock()
1230 wlock = self.wlock()
1231 try:
1231 try:
1232 wctx = self[None]
1232 wctx = self[None]
1233 merge = len(wctx.parents()) > 1
1233 merge = len(wctx.parents()) > 1
1234
1234
1235 if (not force and merge and match and
1235 if (not force and merge and match and
1236 (match.files() or match.anypats())):
1236 (match.files() or match.anypats())):
1237 raise util.Abort(_('cannot partially commit a merge '
1237 raise util.Abort(_('cannot partially commit a merge '
1238 '(do not specify files or patterns)'))
1238 '(do not specify files or patterns)'))
1239
1239
1240 status = self.status(match=match, clean=force)
1240 status = self.status(match=match, clean=force)
1241 if force:
1241 if force:
1242 status.modified.extend(status.clean) # mq may commit clean files
1242 status.modified.extend(status.clean) # mq may commit clean files
1243
1243
1244 # check subrepos
1244 # check subrepos
1245 subs = []
1245 subs = []
1246 commitsubs = set()
1246 commitsubs = set()
1247 newstate = wctx.substate.copy()
1247 newstate = wctx.substate.copy()
1248 # only manage subrepos and .hgsubstate if .hgsub is present
1248 # only manage subrepos and .hgsubstate if .hgsub is present
1249 if '.hgsub' in wctx:
1249 if '.hgsub' in wctx:
1250 # we'll decide whether to track this ourselves, thanks
1250 # we'll decide whether to track this ourselves, thanks
1251 for c in status.modified, status.added, status.removed:
1251 for c in status.modified, status.added, status.removed:
1252 if '.hgsubstate' in c:
1252 if '.hgsubstate' in c:
1253 c.remove('.hgsubstate')
1253 c.remove('.hgsubstate')
1254
1254
1255 # compare current state to last committed state
1255 # compare current state to last committed state
1256 # build new substate based on last committed state
1256 # build new substate based on last committed state
1257 oldstate = wctx.p1().substate
1257 oldstate = wctx.p1().substate
1258 for s in sorted(newstate.keys()):
1258 for s in sorted(newstate.keys()):
1259 if not match(s):
1259 if not match(s):
1260 # ignore working copy, use old state if present
1260 # ignore working copy, use old state if present
1261 if s in oldstate:
1261 if s in oldstate:
1262 newstate[s] = oldstate[s]
1262 newstate[s] = oldstate[s]
1263 continue
1263 continue
1264 if not force:
1264 if not force:
1265 raise util.Abort(
1265 raise util.Abort(
1266 _("commit with new subrepo %s excluded") % s)
1266 _("commit with new subrepo %s excluded") % s)
1267 if wctx.sub(s).dirty(True):
1267 if wctx.sub(s).dirty(True):
1268 if not self.ui.configbool('ui', 'commitsubrepos'):
1268 if not self.ui.configbool('ui', 'commitsubrepos'):
1269 raise util.Abort(
1269 raise util.Abort(
1270 _("uncommitted changes in subrepo %s") % s,
1270 _("uncommitted changes in subrepo %s") % s,
1271 hint=_("use --subrepos for recursive commit"))
1271 hint=_("use --subrepos for recursive commit"))
1272 subs.append(s)
1272 subs.append(s)
1273 commitsubs.add(s)
1273 commitsubs.add(s)
1274 else:
1274 else:
1275 bs = wctx.sub(s).basestate()
1275 bs = wctx.sub(s).basestate()
1276 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1276 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1277 if oldstate.get(s, (None, None, None))[1] != bs:
1277 if oldstate.get(s, (None, None, None))[1] != bs:
1278 subs.append(s)
1278 subs.append(s)
1279
1279
1280 # check for removed subrepos
1280 # check for removed subrepos
1281 for p in wctx.parents():
1281 for p in wctx.parents():
1282 r = [s for s in p.substate if s not in newstate]
1282 r = [s for s in p.substate if s not in newstate]
1283 subs += [s for s in r if match(s)]
1283 subs += [s for s in r if match(s)]
1284 if subs:
1284 if subs:
1285 if (not match('.hgsub') and
1285 if (not match('.hgsub') and
1286 '.hgsub' in (wctx.modified() + wctx.added())):
1286 '.hgsub' in (wctx.modified() + wctx.added())):
1287 raise util.Abort(
1287 raise util.Abort(
1288 _("can't commit subrepos without .hgsub"))
1288 _("can't commit subrepos without .hgsub"))
1289 status.modified.insert(0, '.hgsubstate')
1289 status.modified.insert(0, '.hgsubstate')
1290
1290
1291 elif '.hgsub' in status.removed:
1291 elif '.hgsub' in status.removed:
1292 # clean up .hgsubstate when .hgsub is removed
1292 # clean up .hgsubstate when .hgsub is removed
1293 if ('.hgsubstate' in wctx and
1293 if ('.hgsubstate' in wctx and
1294 '.hgsubstate' not in (status.modified + status.added +
1294 '.hgsubstate' not in (status.modified + status.added +
1295 status.removed)):
1295 status.removed)):
1296 status.removed.insert(0, '.hgsubstate')
1296 status.removed.insert(0, '.hgsubstate')
1297
1297
1298 # make sure all explicit patterns are matched
1298 # make sure all explicit patterns are matched
1299 if not force and match.files():
1299 if not force and match.files():
1300 matched = set(status.modified + status.added + status.removed)
1300 matched = set(status.modified + status.added + status.removed)
1301
1301
1302 for f in match.files():
1302 for f in match.files():
1303 f = self.dirstate.normalize(f)
1303 f = self.dirstate.normalize(f)
1304 if f == '.' or f in matched or f in wctx.substate:
1304 if f == '.' or f in matched or f in wctx.substate:
1305 continue
1305 continue
1306 if f in status.deleted:
1306 if f in status.deleted:
1307 fail(f, _('file not found!'))
1307 fail(f, _('file not found!'))
1308 if f in vdirs: # visited directory
1308 if f in vdirs: # visited directory
1309 d = f + '/'
1309 d = f + '/'
1310 for mf in matched:
1310 for mf in matched:
1311 if mf.startswith(d):
1311 if mf.startswith(d):
1312 break
1312 break
1313 else:
1313 else:
1314 fail(f, _("no match under directory!"))
1314 fail(f, _("no match under directory!"))
1315 elif f not in self.dirstate:
1315 elif f not in self.dirstate:
1316 fail(f, _("file not tracked!"))
1316 fail(f, _("file not tracked!"))
1317
1317
1318 cctx = context.workingctx(self, text, user, date, extra, status)
1318 cctx = context.workingctx(self, text, user, date, extra, status)
1319
1319
1320 if (not force and not extra.get("close") and not merge
1320 if (not force and not extra.get("close") and not merge
1321 and not cctx.files()
1321 and not cctx.files()
1322 and wctx.branch() == wctx.p1().branch()):
1322 and wctx.branch() == wctx.p1().branch()):
1323 return None
1323 return None
1324
1324
1325 if merge and cctx.deleted():
1325 if merge and cctx.deleted():
1326 raise util.Abort(_("cannot commit merge with missing files"))
1326 raise util.Abort(_("cannot commit merge with missing files"))
1327
1327
1328 ms = mergemod.mergestate(self)
1328 ms = mergemod.mergestate(self)
1329 for f in status.modified:
1329 for f in status.modified:
1330 if f in ms and ms[f] == 'u':
1330 if f in ms and ms[f] == 'u':
1331 raise util.Abort(_("unresolved merge conflicts "
1331 raise util.Abort(_("unresolved merge conflicts "
1332 "(see hg help resolve)"))
1332 "(see hg help resolve)"))
1333
1333
1334 if editor:
1334 if editor:
1335 cctx._text = editor(self, cctx, subs)
1335 cctx._text = editor(self, cctx, subs)
1336 edited = (text != cctx._text)
1336 edited = (text != cctx._text)
1337
1337
1338 # Save commit message in case this transaction gets rolled back
1338 # Save commit message in case this transaction gets rolled back
1339 # (e.g. by a pretxncommit hook). Leave the content alone on
1339 # (e.g. by a pretxncommit hook). Leave the content alone on
1340 # the assumption that the user will use the same editor again.
1340 # the assumption that the user will use the same editor again.
1341 msgfn = self.savecommitmessage(cctx._text)
1341 msgfn = self.savecommitmessage(cctx._text)
1342
1342
1343 # commit subs and write new state
1343 # commit subs and write new state
1344 if subs:
1344 if subs:
1345 for s in sorted(commitsubs):
1345 for s in sorted(commitsubs):
1346 sub = wctx.sub(s)
1346 sub = wctx.sub(s)
1347 self.ui.status(_('committing subrepository %s\n') %
1347 self.ui.status(_('committing subrepository %s\n') %
1348 subrepo.subrelpath(sub))
1348 subrepo.subrelpath(sub))
1349 sr = sub.commit(cctx._text, user, date)
1349 sr = sub.commit(cctx._text, user, date)
1350 newstate[s] = (newstate[s][0], sr)
1350 newstate[s] = (newstate[s][0], sr)
1351 subrepo.writestate(self, newstate)
1351 subrepo.writestate(self, newstate)
1352
1352
1353 p1, p2 = self.dirstate.parents()
1353 p1, p2 = self.dirstate.parents()
1354 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1354 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1355 try:
1355 try:
1356 self.hook("precommit", throw=True, parent1=hookp1,
1356 self.hook("precommit", throw=True, parent1=hookp1,
1357 parent2=hookp2)
1357 parent2=hookp2)
1358 ret = self.commitctx(cctx, True)
1358 ret = self.commitctx(cctx, True)
1359 except: # re-raises
1359 except: # re-raises
1360 if edited:
1360 if edited:
1361 self.ui.write(
1361 self.ui.write(
1362 _('note: commit message saved in %s\n') % msgfn)
1362 _('note: commit message saved in %s\n') % msgfn)
1363 raise
1363 raise
1364
1364
1365 # update bookmarks, dirstate and mergestate
1365 # update bookmarks, dirstate and mergestate
1366 bookmarks.update(self, [p1, p2], ret)
1366 bookmarks.update(self, [p1, p2], ret)
1367 cctx.markcommitted(ret)
1367 cctx.markcommitted(ret)
1368 ms.reset()
1368 ms.reset()
1369 finally:
1369 finally:
1370 wlock.release()
1370 wlock.release()
1371
1371
1372 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1372 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1373 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1373 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1374 self._afterlock(commithook)
1374 self._afterlock(commithook)
1375 return ret
1375 return ret
1376
1376
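commit() ties the preceding pieces together: it takes the working-copy lock, vets subrepos and explicit patterns, builds a workingctx and hands it to commitctx(). The simplest programmatic call is sketched below; the path, user and message are assumptions.

    # Sketch only: commit every pending working-directory change.
    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    node = repo.commit(text='example change', user='Example <user@example.com>')
    if node is None:
        repo.ui.status('nothing changed\n')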
1377 @unfilteredmethod
1377 @unfilteredmethod
1378 def commitctx(self, ctx, error=False):
1378 def commitctx(self, ctx, error=False):
1379 """Add a new revision to current repository.
1379 """Add a new revision to current repository.
1380 Revision information is passed via the context argument.
1380 Revision information is passed via the context argument.
1381 """
1381 """
1382
1382
1383 tr = None
1383 tr = None
1384 p1, p2 = ctx.p1(), ctx.p2()
1384 p1, p2 = ctx.p1(), ctx.p2()
1385 user = ctx.user()
1385 user = ctx.user()
1386
1386
1387 lock = self.lock()
1387 lock = self.lock()
1388 try:
1388 try:
1389 tr = self.transaction("commit")
1389 tr = self.transaction("commit")
1390 trp = weakref.proxy(tr)
1390 trp = weakref.proxy(tr)
1391
1391
1392 if ctx.files():
1392 if ctx.files():
1393 m1 = p1.manifest()
1393 m1 = p1.manifest()
1394 m2 = p2.manifest()
1394 m2 = p2.manifest()
1395 m = m1.copy()
1395 m = m1.copy()
1396
1396
1397 # check in files
1397 # check in files
1398 added = []
1398 added = []
1399 changed = []
1399 changed = []
1400 removed = list(ctx.removed())
1400 removed = list(ctx.removed())
1401 linkrev = len(self)
1401 linkrev = len(self)
1402 for f in sorted(ctx.modified() + ctx.added()):
1402 for f in sorted(ctx.modified() + ctx.added()):
1403 self.ui.note(f + "\n")
1403 self.ui.note(f + "\n")
1404 try:
1404 try:
1405 fctx = ctx[f]
1405 fctx = ctx[f]
1406 if fctx is None:
1406 if fctx is None:
1407 removed.append(f)
1407 removed.append(f)
1408 else:
1408 else:
1409 added.append(f)
1409 added.append(f)
1410 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1410 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1411 trp, changed)
1411 trp, changed)
1412 m.setflag(f, fctx.flags())
1412 m.setflag(f, fctx.flags())
1413 except OSError, inst:
1413 except OSError, inst:
1414 self.ui.warn(_("trouble committing %s!\n") % f)
1414 self.ui.warn(_("trouble committing %s!\n") % f)
1415 raise
1415 raise
1416 except IOError, inst:
1416 except IOError, inst:
1417 errcode = getattr(inst, 'errno', errno.ENOENT)
1417 errcode = getattr(inst, 'errno', errno.ENOENT)
1418 if error or errcode and errcode != errno.ENOENT:
1418 if error or errcode and errcode != errno.ENOENT:
1419 self.ui.warn(_("trouble committing %s!\n") % f)
1419 self.ui.warn(_("trouble committing %s!\n") % f)
1420 raise
1420 raise
1421
1421
1422 # update manifest
1422 # update manifest
1423 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1423 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1424 drop = [f for f in removed if f in m]
1424 drop = [f for f in removed if f in m]
1425 for f in drop:
1425 for f in drop:
1426 del m[f]
1426 del m[f]
1427 mn = self.manifest.add(m, trp, linkrev,
1427 mn = self.manifest.add(m, trp, linkrev,
1428 p1.manifestnode(), p2.manifestnode(),
1428 p1.manifestnode(), p2.manifestnode(),
1429 added, drop)
1429 added, drop)
1430 files = changed + removed
1430 files = changed + removed
1431 else:
1431 else:
1432 mn = p1.manifestnode()
1432 mn = p1.manifestnode()
1433 files = []
1433 files = []
1434
1434
1435 # update changelog
1435 # update changelog
1436 self.changelog.delayupdate()
1436 self.changelog.delayupdate()
1437 n = self.changelog.add(mn, files, ctx.description(),
1437 n = self.changelog.add(mn, files, ctx.description(),
1438 trp, p1.node(), p2.node(),
1438 trp, p1.node(), p2.node(),
1439 user, ctx.date(), ctx.extra().copy())
1439 user, ctx.date(), ctx.extra().copy())
1440 p = lambda: self.changelog.writepending() and self.root or ""
1440 p = lambda: self.changelog.writepending() and self.root or ""
1441 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1441 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1442 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1442 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1443 parent2=xp2, pending=p)
1443 parent2=xp2, pending=p)
1444 self.changelog.finalize(trp)
1444 self.changelog.finalize(trp)
1445 # set the new commit in its proper phase
1445 # set the new commit in its proper phase
1446 targetphase = subrepo.newcommitphase(self.ui, ctx)
1446 targetphase = subrepo.newcommitphase(self.ui, ctx)
1447 if targetphase:
1447 if targetphase:
1448 # retracting the boundary does not alter the parent changeset.
1448 # retracting the boundary does not alter the parent changeset.
1449 # if a parent has a higher phase, the resulting phase will
1449 # if a parent has a higher phase, the resulting phase will
1450 # be compliant anyway
1450 # be compliant anyway
1451 #
1451 #
1452 # if the minimal phase was 0, we don't need to retract anything
1452 # if the minimal phase was 0, we don't need to retract anything
1453 phases.retractboundary(self, tr, targetphase, [n])
1453 phases.retractboundary(self, tr, targetphase, [n])
1454 tr.close()
1454 tr.close()
1455 branchmap.updatecache(self.filtered('served'))
1455 branchmap.updatecache(self.filtered('served'))
1456 return n
1456 return n
1457 finally:
1457 finally:
1458 if tr:
1458 if tr:
1459 tr.release()
1459 tr.release()
1460 lock.release()
1460 lock.release()
1461
1461
1462 @unfilteredmethod
1462 @unfilteredmethod
1463 def destroying(self):
1463 def destroying(self):
1464 '''Inform the repository that nodes are about to be destroyed.
1464 '''Inform the repository that nodes are about to be destroyed.
1465 Intended for use by strip and rollback, so there's a common
1465 Intended for use by strip and rollback, so there's a common
1466 place for anything that has to be done before destroying history.
1466 place for anything that has to be done before destroying history.
1467
1467
1468 This is mostly useful for saving state that is in memory and waiting
1468 This is mostly useful for saving state that is in memory and waiting
1469 to be flushed when the current lock is released. Because a call to
1469 to be flushed when the current lock is released. Because a call to
1470 destroyed is imminent, the repo will be invalidated causing those
1470 destroyed is imminent, the repo will be invalidated causing those
1471 changes to stay in memory (waiting for the next unlock), or vanish
1471 changes to stay in memory (waiting for the next unlock), or vanish
1472 completely.
1472 completely.
1473 '''
1473 '''
1474 # When using the same lock to commit and strip, the phasecache is left
1474 # When using the same lock to commit and strip, the phasecache is left
1475 # dirty after committing. Then when we strip, the repo is invalidated,
1475 # dirty after committing. Then when we strip, the repo is invalidated,
1476 # causing those changes to disappear.
1476 # causing those changes to disappear.
1477 if '_phasecache' in vars(self):
1477 if '_phasecache' in vars(self):
1478 self._phasecache.write()
1478 self._phasecache.write()
1479
1479
1480 @unfilteredmethod
1480 @unfilteredmethod
1481 def destroyed(self):
1481 def destroyed(self):
1482 '''Inform the repository that nodes have been destroyed.
1482 '''Inform the repository that nodes have been destroyed.
1483 Intended for use by strip and rollback, so there's a common
1483 Intended for use by strip and rollback, so there's a common
1484 place for anything that has to be done after destroying history.
1484 place for anything that has to be done after destroying history.
1485 '''
1485 '''
1486 # When one tries to:
1486 # When one tries to:
1487 # 1) destroy nodes thus calling this method (e.g. strip)
1487 # 1) destroy nodes thus calling this method (e.g. strip)
1488 # 2) use phasecache somewhere (e.g. commit)
1488 # 2) use phasecache somewhere (e.g. commit)
1489 #
1489 #
1490 # then 2) will fail because the phasecache contains nodes that were
1490 # then 2) will fail because the phasecache contains nodes that were
1491 # removed. We can either remove phasecache from the filecache,
1491 # removed. We can either remove phasecache from the filecache,
1492 # causing it to reload next time it is accessed, or simply filter
1492 # causing it to reload next time it is accessed, or simply filter
1493 # the removed nodes now and write the updated cache.
1493 # the removed nodes now and write the updated cache.
1494 self._phasecache.filterunknown(self)
1494 self._phasecache.filterunknown(self)
1495 self._phasecache.write()
1495 self._phasecache.write()
1496
1496
1497 # update the 'served' branch cache to help read-only server processes
1497 # update the 'served' branch cache to help read-only server processes
1498 # Thanks to branchcache collaboration this is done from the nearest
1498 # Thanks to branchcache collaboration this is done from the nearest
1499 # filtered subset and it is expected to be fast.
1499 # filtered subset and it is expected to be fast.
1500 branchmap.updatecache(self.filtered('served'))
1500 branchmap.updatecache(self.filtered('served'))
1501
1501
1502 # Ensure the persistent tag cache is updated. Doing it now
1502 # Ensure the persistent tag cache is updated. Doing it now
1503 # means that the tag cache only has to worry about destroyed
1503 # means that the tag cache only has to worry about destroyed
1504 # heads immediately after a strip/rollback. That in turn
1504 # heads immediately after a strip/rollback. That in turn
1505 # guarantees that "cachetip == currenttip" (comparing both rev
1505 # guarantees that "cachetip == currenttip" (comparing both rev
1506 # and node) always means no nodes have been added or destroyed.
1506 # and node) always means no nodes have been added or destroyed.
1507
1507
1508 # XXX this is suboptimal when qrefresh'ing: we strip the current
1508 # XXX this is suboptimal when qrefresh'ing: we strip the current
1509 # head, refresh the tag cache, then immediately add a new head.
1509 # head, refresh the tag cache, then immediately add a new head.
1510 # But I think doing it this way is necessary for the "instant
1510 # But I think doing it this way is necessary for the "instant
1511 # tag cache retrieval" case to work.
1511 # tag cache retrieval" case to work.
1512 self.invalidate()
1512 self.invalidate()
1513
1513
1514 def walk(self, match, node=None):
1514 def walk(self, match, node=None):
1515 '''
1515 '''
1516 walk recursively through the directory tree or a given
1516 walk recursively through the directory tree or a given
1517 changeset, finding all files matched by the match
1517 changeset, finding all files matched by the match
1518 function
1518 function
1519 '''
1519 '''
1520 return self[node].walk(match)
1520 return self[node].walk(match)
1521
1521
1522 def status(self, node1='.', node2=None, match=None,
1522 def status(self, node1='.', node2=None, match=None,
1523 ignored=False, clean=False, unknown=False,
1523 ignored=False, clean=False, unknown=False,
1524 listsubrepos=False):
1524 listsubrepos=False):
1525 '''a convenience method that calls node1.status(node2)'''
1525 '''a convenience method that calls node1.status(node2)'''
1526 return self[node1].status(node2, match, ignored, clean, unknown,
1526 return self[node1].status(node2, match, ignored, clean, unknown,
1527 listsubrepos)
1527 listsubrepos)
1528
1528
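status() is a thin convenience wrapper around ctx.status(); the object it returns carries the modified/added/removed/deleted/unknown/ignored/clean lists that commit() consumed above. Sketch (path assumed):

    # Sketch only: compare the working directory against its first parent.
    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    st = repo.status(unknown=True)
    for f in st.modified:
        repo.ui.write('M %s\n' % f)
    for f in st.unknown:
        repo.ui.write('? %s\n' % f)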
1529 def heads(self, start=None):
1529 def heads(self, start=None):
1530 heads = self.changelog.heads(start)
1530 heads = self.changelog.heads(start)
1531 # sort the output in rev descending order
1531 # sort the output in rev descending order
1532 return sorted(heads, key=self.changelog.rev, reverse=True)
1532 return sorted(heads, key=self.changelog.rev, reverse=True)
1533
1533
1534 def branchheads(self, branch=None, start=None, closed=False):
1534 def branchheads(self, branch=None, start=None, closed=False):
1535 '''return a (possibly filtered) list of heads for the given branch
1535 '''return a (possibly filtered) list of heads for the given branch
1536
1536
1537 Heads are returned in topological order, from newest to oldest.
1537 Heads are returned in topological order, from newest to oldest.
1538 If branch is None, use the dirstate branch.
1538 If branch is None, use the dirstate branch.
1539 If start is not None, return only heads reachable from start.
1539 If start is not None, return only heads reachable from start.
1540 If closed is True, return heads that are marked as closed as well.
1540 If closed is True, return heads that are marked as closed as well.
1541 '''
1541 '''
1542 if branch is None:
1542 if branch is None:
1543 branch = self[None].branch()
1543 branch = self[None].branch()
1544 branches = self.branchmap()
1544 branches = self.branchmap()
1545 if branch not in branches:
1545 if branch not in branches:
1546 return []
1546 return []
1547 # the cache returns heads ordered lowest to highest
1547 # the cache returns heads ordered lowest to highest
1548 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1548 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1549 if start is not None:
1549 if start is not None:
1550 # filter out the heads that cannot be reached from startrev
1550 # filter out the heads that cannot be reached from startrev
1551 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1551 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1552 bheads = [h for h in bheads if h in fbheads]
1552 bheads = [h for h in bheads if h in fbheads]
1553 return bheads
1553 return bheads
1554
1554
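heads() returns every repository head newest-first, and branchheads() narrows that to one named branch via the branch cache. Sketch (path and branch name assumed):

    # Sketch only: list the heads of the 'default' branch.
    from mercurial import hg, ui as uimod
    from mercurial.node import short

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    for node in repo.branchheads('default'):
        repo.ui.write('%d:%s\n' % (repo.changelog.rev(node), short(node)))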
1555 def branches(self, nodes):
1555 def branches(self, nodes):
1556 if not nodes:
1556 if not nodes:
1557 nodes = [self.changelog.tip()]
1557 nodes = [self.changelog.tip()]
1558 b = []
1558 b = []
1559 for n in nodes:
1559 for n in nodes:
1560 t = n
1560 t = n
1561 while True:
1561 while True:
1562 p = self.changelog.parents(n)
1562 p = self.changelog.parents(n)
1563 if p[1] != nullid or p[0] == nullid:
1563 if p[1] != nullid or p[0] == nullid:
1564 b.append((t, n, p[0], p[1]))
1564 b.append((t, n, p[0], p[1]))
1565 break
1565 break
1566 n = p[0]
1566 n = p[0]
1567 return b
1567 return b
1568
1568
1569 def between(self, pairs):
1569 def between(self, pairs):
1570 r = []
1570 r = []
1571
1571
1572 for top, bottom in pairs:
1572 for top, bottom in pairs:
1573 n, l, i = top, [], 0
1573 n, l, i = top, [], 0
1574 f = 1
1574 f = 1
1575
1575
1576 while n != bottom and n != nullid:
1576 while n != bottom and n != nullid:
1577 p = self.changelog.parents(n)[0]
1577 p = self.changelog.parents(n)[0]
1578 if i == f:
1578 if i == f:
1579 l.append(n)
1579 l.append(n)
1580 f = f * 2
1580 f = f * 2
1581 n = p
1581 n = p
1582 i += 1
1582 i += 1
1583
1583
1584 r.append(l)
1584 r.append(l)
1585
1585
1586 return r
1586 return r
1587
1587
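between() walks first-parent history from each 'top' towards 'bottom' and records the nodes it passes at exponentially growing distances (1, 2, 4, 8, ...); the wire protocol uses those samples to bisect for common ancestors. The pure-Python sketch below applies the same sampling to a plain list so the reported positions are easy to see.

    # Sketch only: the 1, 2, 4, 8, ... sampling from between(), applied to a list.
    def sample(revs):
        picked, f, i = [], 1, 0
        for r in revs:            # ordered from 'top' towards 'bottom'
            if i == f:
                picked.append(r)
                f *= 2
            i += 1
        return picked

    print(sample(range(20, 0, -1)))   # -> [19, 18, 16, 12, 4]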
1588 def checkpush(self, pushop):
1588 def checkpush(self, pushop):
1589 """Extensions can override this function if additional checks have
1589 """Extensions can override this function if additional checks have
1590 to be performed before pushing, or call it if they override push
1590 to be performed before pushing, or call it if they override push
1591 command.
1591 command.
1592 """
1592 """
1593 pass
1593 pass
1594
1594
1595 @unfilteredpropertycache
1595 @unfilteredpropertycache
1596 def prepushoutgoinghooks(self):
1596 def prepushoutgoinghooks(self):
1597 """Return util.hooks consists of "(repo, remote, outgoing)"
1597 """Return util.hooks consists of "(repo, remote, outgoing)"
1598 functions, which are called before pushing changesets.
1598 functions, which are called before pushing changesets.
1599 """
1599 """
1600 return util.hooks()
1600 return util.hooks()
1601
1601
1602 def stream_in(self, remote, requirements):
1602 def stream_in(self, remote, requirements):
1603 lock = self.lock()
1603 lock = self.lock()
1604 try:
1604 try:
1605 # Save remote branchmap. We will use it later
1605 # Save remote branchmap. We will use it later
1606 # to speed up branchcache creation
1606 # to speed up branchcache creation
1607 rbranchmap = None
1607 rbranchmap = None
1608 if remote.capable("branchmap"):
1608 if remote.capable("branchmap"):
1609 rbranchmap = remote.branchmap()
1609 rbranchmap = remote.branchmap()
1610
1610
1611 fp = remote.stream_out()
1611 fp = remote.stream_out()
1612 l = fp.readline()
1612 l = fp.readline()
1613 try:
1613 try:
1614 resp = int(l)
1614 resp = int(l)
1615 except ValueError:
1615 except ValueError:
1616 raise error.ResponseError(
1616 raise error.ResponseError(
1617 _('unexpected response from remote server:'), l)
1617 _('unexpected response from remote server:'), l)
1618 if resp == 1:
1618 if resp == 1:
1619 raise util.Abort(_('operation forbidden by server'))
1619 raise util.Abort(_('operation forbidden by server'))
1620 elif resp == 2:
1620 elif resp == 2:
1621 raise util.Abort(_('locking the remote repository failed'))
1621 raise util.Abort(_('locking the remote repository failed'))
1622 elif resp != 0:
1622 elif resp != 0:
1623 raise util.Abort(_('the server sent an unknown error code'))
1623 raise util.Abort(_('the server sent an unknown error code'))
1624 self.ui.status(_('streaming all changes\n'))
1624 self.ui.status(_('streaming all changes\n'))
1625 l = fp.readline()
1625 l = fp.readline()
1626 try:
1626 try:
1627 total_files, total_bytes = map(int, l.split(' ', 1))
1627 total_files, total_bytes = map(int, l.split(' ', 1))
1628 except (ValueError, TypeError):
1628 except (ValueError, TypeError):
1629 raise error.ResponseError(
1629 raise error.ResponseError(
1630 _('unexpected response from remote server:'), l)
1630 _('unexpected response from remote server:'), l)
1631 self.ui.status(_('%d files to transfer, %s of data\n') %
1631 self.ui.status(_('%d files to transfer, %s of data\n') %
1632 (total_files, util.bytecount(total_bytes)))
1632 (total_files, util.bytecount(total_bytes)))
1633 handled_bytes = 0
1633 handled_bytes = 0
1634 self.ui.progress(_('clone'), 0, total=total_bytes)
1634 self.ui.progress(_('clone'), 0, total=total_bytes)
1635 start = time.time()
1635 start = time.time()
1636
1636
1637 tr = self.transaction(_('clone'))
1637 tr = self.transaction(_('clone'))
1638 try:
1638 try:
1639 for i in xrange(total_files):
1639 for i in xrange(total_files):
1640 # XXX doesn't support '\n' or '\r' in filenames
1640 # XXX doesn't support '\n' or '\r' in filenames
1641 l = fp.readline()
1641 l = fp.readline()
1642 try:
1642 try:
1643 name, size = l.split('\0', 1)
1643 name, size = l.split('\0', 1)
1644 size = int(size)
1644 size = int(size)
1645 except (ValueError, TypeError):
1645 except (ValueError, TypeError):
1646 raise error.ResponseError(
1646 raise error.ResponseError(
1647 _('unexpected response from remote server:'), l)
1647 _('unexpected response from remote server:'), l)
1648 if self.ui.debugflag:
1648 if self.ui.debugflag:
1649 self.ui.debug('adding %s (%s)\n' %
1649 self.ui.debug('adding %s (%s)\n' %
1650 (name, util.bytecount(size)))
1650 (name, util.bytecount(size)))
1651 # for backwards compat, name was partially encoded
1651 # for backwards compat, name was partially encoded
1652 ofp = self.sopener(store.decodedir(name), 'w')
1652 ofp = self.sopener(store.decodedir(name), 'w')
1653 for chunk in util.filechunkiter(fp, limit=size):
1653 for chunk in util.filechunkiter(fp, limit=size):
1654 handled_bytes += len(chunk)
1654 handled_bytes += len(chunk)
1655 self.ui.progress(_('clone'), handled_bytes,
1655 self.ui.progress(_('clone'), handled_bytes,
1656 total=total_bytes)
1656 total=total_bytes)
1657 ofp.write(chunk)
1657 ofp.write(chunk)
1658 ofp.close()
1658 ofp.close()
1659 tr.close()
1659 tr.close()
1660 finally:
1660 finally:
1661 tr.release()
1661 tr.release()
1662
1662
1663 # Writing straight to files circumvented the in-memory caches
1663 # Writing straight to files circumvented the in-memory caches
1664 self.invalidate()
1664 self.invalidate()
1665
1665
1666 elapsed = time.time() - start
1666 elapsed = time.time() - start
1667 if elapsed <= 0:
1667 if elapsed <= 0:
1668 elapsed = 0.001
1668 elapsed = 0.001
1669 self.ui.progress(_('clone'), None)
1669 self.ui.progress(_('clone'), None)
1670 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1670 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1671 (util.bytecount(total_bytes), elapsed,
1671 (util.bytecount(total_bytes), elapsed,
1672 util.bytecount(total_bytes / elapsed)))
1672 util.bytecount(total_bytes / elapsed)))
1673
1673
1674 # new requirements = old non-format requirements +
1674 # new requirements = old non-format requirements +
1675 # new format-related
1675 # new format-related
1676 # requirements from the streamed-in repository
1676 # requirements from the streamed-in repository
1677 requirements.update(set(self.requirements) - self.supportedformats)
1677 requirements.update(set(self.requirements) - self.supportedformats)
1678 self._applyrequirements(requirements)
1678 self._applyrequirements(requirements)
1679 self._writerequirements()
1679 self._writerequirements()
1680
1680
1681 if rbranchmap:
1681 if rbranchmap:
1682 rbheads = []
1682 rbheads = []
1683 for bheads in rbranchmap.itervalues():
1683 for bheads in rbranchmap.itervalues():
1684 rbheads.extend(bheads)
1684 rbheads.extend(bheads)
1685
1685
1686 if rbheads:
1686 if rbheads:
1687 rtiprev = max((int(self.changelog.rev(node))
1687 rtiprev = max((int(self.changelog.rev(node))
1688 for node in rbheads))
1688 for node in rbheads))
1689 cache = branchmap.branchcache(rbranchmap,
1689 cache = branchmap.branchcache(rbranchmap,
1690 self[rtiprev].node(),
1690 self[rtiprev].node(),
1691 rtiprev)
1691 rtiprev)
1692 # Try to stick it as low as possible
1692 # Try to stick it as low as possible
1693 # filters above 'served' are unlikely to be fetched from a clone
1693 # filters above 'served' are unlikely to be fetched from a clone
1694 for candidate in ('base', 'immutable', 'served'):
1694 for candidate in ('base', 'immutable', 'served'):
1695 rview = self.filtered(candidate)
1695 rview = self.filtered(candidate)
1696 if cache.validfor(rview):
1696 if cache.validfor(rview):
1697 self._branchcaches[candidate] = cache
1697 self._branchcaches[candidate] = cache
1698 cache.write(rview)
1698 cache.write(rview)
1699 break
1699 break
1700 self.invalidate()
1700 self.invalidate()
1701 return len(self.heads()) + 1
1701 return len(self.heads()) + 1
1702 finally:
1702 finally:
1703 lock.release()
1703 lock.release()
1704
1704
1705 def clone(self, remote, heads=[], stream=False):
1705 def clone(self, remote, heads=[], stream=False):
1706 '''clone remote repository.
1706 '''clone remote repository.
1707
1707
1708 keyword arguments:
1708 keyword arguments:
1709 heads: list of revs to clone (forces use of pull)
1709 heads: list of revs to clone (forces use of pull)
1710 stream: use streaming clone if possible'''
1710 stream: use streaming clone if possible'''
1711
1711
1712 # now, all clients that can request uncompressed clones can
1712 # now, all clients that can request uncompressed clones can
1713 # read repo formats supported by all servers that can serve
1713 # read repo formats supported by all servers that can serve
1714 # them.
1714 # them.
1715
1715
1716 # if revlog format changes, client will have to check version
1716 # if revlog format changes, client will have to check version
1717 # and format flags on "stream" capability, and use
1717 # and format flags on "stream" capability, and use
1718 # uncompressed only if compatible.
1718 # uncompressed only if compatible.
1719
1719
1720 if not stream:
1720 if not stream:
1721 # if the server explicitly prefers to stream (for fast LANs)
1721 # if the server explicitly prefers to stream (for fast LANs)
1722 stream = remote.capable('stream-preferred')
1722 stream = remote.capable('stream-preferred')
1723
1723
1724 if stream and not heads:
1724 if stream and not heads:
1725 # 'stream' means remote revlog format is revlogv1 only
1725 # 'stream' means remote revlog format is revlogv1 only
1726 if remote.capable('stream'):
1726 if remote.capable('stream'):
1727 return self.stream_in(remote, set(('revlogv1',)))
1727 self.stream_in(remote, set(('revlogv1',)))
1728 # otherwise, 'streamreqs' contains the remote revlog format
1728 else:
1729 streamreqs = remote.capable('streamreqs')
1729 # otherwise, 'streamreqs' contains the remote revlog format
1730 if streamreqs:
1730 streamreqs = remote.capable('streamreqs')
1731 streamreqs = set(streamreqs.split(','))
1731 if streamreqs:
1732 # if we support it, stream in and adjust our requirements
1732 streamreqs = set(streamreqs.split(','))
1733 if not streamreqs - self.supportedformats:
1733 # if we support it, stream in and adjust our requirements
1734 return self.stream_in(remote, streamreqs)
1734 if not streamreqs - self.supportedformats:
1735 self.stream_in(remote, streamreqs)
1735
1736
1736 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1737 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1737 try:
1738 try:
1738 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1739 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1739 ret = exchange.pull(self, remote, heads).cgresult
1740 ret = exchange.pull(self, remote, heads).cgresult
1740 finally:
1741 finally:
1741 self.ui.restoreconfig(quiet)
1742 self.ui.restoreconfig(quiet)
1742 return ret
1743 return ret
1743
1744
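The hunk above holds this revision's substantive change: clone() used to return the result of stream_in() directly, which skipped the exchange.pull() call at the bottom of the method; with the early returns dropped, a streaming clone still falls through to that pull, the step that, among other things, transfers bookmarks (hence the quietbookmarkmove toggle around it). A hedged sketch of driving this path programmatically, with an assumed server URL, destination path, and the 3.x hg.peer()/hg.repository() signatures:

    # Sketch only: clone over the wire with streaming preferred.
    from mercurial import hg, ui as uimod

    ui = uimod.ui()
    remote = hg.peer(ui, {}, 'http://localhost:8000/')         # assumed server
    dest = hg.repository(ui, '/tmp/clone-dest', create=True)   # assumed destination
    dest.clone(remote, stream=True)  # streams the store, then pull() still syncs bookmarks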
1744 def pushkey(self, namespace, key, old, new):
1745 def pushkey(self, namespace, key, old, new):
1745 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1746 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1746 old=old, new=new)
1747 old=old, new=new)
1747 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1748 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1748 ret = pushkey.push(self, namespace, key, old, new)
1749 ret = pushkey.push(self, namespace, key, old, new)
1749 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1750 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1750 ret=ret)
1751 ret=ret)
1751 return ret
1752 return ret
1752
1753
1753 def listkeys(self, namespace):
1754 def listkeys(self, namespace):
1754 self.hook('prelistkeys', throw=True, namespace=namespace)
1755 self.hook('prelistkeys', throw=True, namespace=namespace)
1755 self.ui.debug('listing keys for "%s"\n' % namespace)
1756 self.ui.debug('listing keys for "%s"\n' % namespace)
1756 values = pushkey.list(self, namespace)
1757 values = pushkey.list(self, namespace)
1757 self.hook('listkeys', namespace=namespace, values=values)
1758 self.hook('listkeys', namespace=namespace, values=values)
1758 return values
1759 return values
1759
1760
1760 def debugwireargs(self, one, two, three=None, four=None, five=None):
1761 def debugwireargs(self, one, two, three=None, four=None, five=None):
1761 '''used to test argument passing over the wire'''
1762 '''used to test argument passing over the wire'''
1762 return "%s %s %s %s %s" % (one, two, three, four, five)
1763 return "%s %s %s %s %s" % (one, two, three, four, five)
1763
1764
1764 def savecommitmessage(self, text):
1765 def savecommitmessage(self, text):
1765 fp = self.opener('last-message.txt', 'wb')
1766 fp = self.opener('last-message.txt', 'wb')
1766 try:
1767 try:
1767 fp.write(text)
1768 fp.write(text)
1768 finally:
1769 finally:
1769 fp.close()
1770 fp.close()
1770 return self.pathto(fp.name[len(self.root) + 1:])
1771 return self.pathto(fp.name[len(self.root) + 1:])
1771
1772
1772 # used to avoid circular references so destructors work
1773 # used to avoid circular references so destructors work
1773 def aftertrans(files):
1774 def aftertrans(files):
1774 renamefiles = [tuple(t) for t in files]
1775 renamefiles = [tuple(t) for t in files]
1775 def a():
1776 def a():
1776 for vfs, src, dest in renamefiles:
1777 for vfs, src, dest in renamefiles:
1777 try:
1778 try:
1778 vfs.rename(src, dest)
1779 vfs.rename(src, dest)
1779 except OSError: # journal file does not yet exist
1780 except OSError: # journal file does not yet exist
1780 pass
1781 pass
1781 return a
1782 return a
1782
1783
1783 def undoname(fn):
1784 def undoname(fn):
1784 base, name = os.path.split(fn)
1785 base, name = os.path.split(fn)
1785 assert name.startswith('journal')
1786 assert name.startswith('journal')
1786 return os.path.join(base, name.replace('journal', 'undo', 1))
1787 return os.path.join(base, name.replace('journal', 'undo', 1))
1787
1788
1788 def instance(ui, path, create):
1789 def instance(ui, path, create):
1789 return localrepository(ui, util.urllocalpath(path), create)
1790 return localrepository(ui, util.urllocalpath(path), create)
1790
1791
1791 def islocal(path):
1792 def islocal(path):
1792 return True
1793 return True
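
The localrepo.py change above is the core of the issue4430 fix: stream_in() no longer makes clone() return early, so the pull that follows still runs after a streaming clone and picks up the bookmarks (and phases) that the raw store-file stream does not carry. The short Python sketch below is a hypothetical, self-contained model of that control flow only; the stub functions stream_in, pull and clone merely stand in for the real localrepository internals shown in the diff and are not Mercurial APIs.

def stream_in(remote):
    # model of the streaming path: bulk-copies store files (changelog,
    # manifests, filelogs) but carries no bookmarks or phases
    print('streaming all changes')

def pull(remote):
    # model of the follow-up pull: finds nothing new after a stream clone,
    # but it is the step that transfers bookmarks and phases
    print('searching for changes')
    print('no changes found')
    return 0

def clone(remote, stream=True, heads=None):
    if stream and not heads and 'stream' in remote['capabilities']:
        stream_in(remote)
        # before the fix, clone() returned here, so bookmarks were never copied
    return pull(remote)

if __name__ == '__main__':
    clone({'capabilities': {'stream'}})

Run standalone, this sketch prints the same message sequence the updated tests below now expect after an uncompressed clone: "streaming all changes" followed by "searching for changes" / "no changes found".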
@@ -1,123 +1,128 b''
1 #require serve
1 #require serve
2
2
3 $ hg init a
3 $ hg init a
4 $ cd a
4 $ cd a
5 $ echo a > a
5 $ echo a > a
6 $ hg ci -Ama -d '1123456789 0'
6 $ hg ci -Ama -d '1123456789 0'
7 adding a
7 adding a
8 $ hg --config server.uncompressed=True serve -p $HGPORT -d --pid-file=hg.pid
8 $ hg --config server.uncompressed=True serve -p $HGPORT -d --pid-file=hg.pid
9 $ cat hg.pid >> $DAEMON_PIDS
9 $ cat hg.pid >> $DAEMON_PIDS
10 $ cd ..
10 $ cd ..
11 $ "$TESTDIR/tinyproxy.py" $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
11 $ "$TESTDIR/tinyproxy.py" $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
12 $ while [ ! -f proxy.pid ]; do sleep 0; done
12 $ while [ ! -f proxy.pid ]; do sleep 0; done
13 $ cat proxy.pid >> $DAEMON_PIDS
13 $ cat proxy.pid >> $DAEMON_PIDS
14
14
15 url for proxy, stream
15 url for proxy, stream
16
16
17 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --uncompressed http://localhost:$HGPORT/ b
17 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --uncompressed http://localhost:$HGPORT/ b
18 streaming all changes
18 streaming all changes
19 3 files to transfer, 303 bytes of data
19 3 files to transfer, 303 bytes of data
20 transferred * bytes in * seconds (*/sec) (glob)
20 transferred * bytes in * seconds (*/sec) (glob)
21 searching for changes
22 no changes found
21 updating to branch default
23 updating to branch default
22 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
24 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
23 $ cd b
25 $ cd b
24 $ hg verify
26 $ hg verify
25 checking changesets
27 checking changesets
26 checking manifests
28 checking manifests
27 crosschecking files in changesets and manifests
29 crosschecking files in changesets and manifests
28 checking files
30 checking files
29 1 files, 1 changesets, 1 total revisions
31 1 files, 1 changesets, 1 total revisions
30 $ cd ..
32 $ cd ..
31
33
32 url for proxy, pull
34 url for proxy, pull
33
35
34 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
36 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
35 requesting all changes
37 requesting all changes
36 adding changesets
38 adding changesets
37 adding manifests
39 adding manifests
38 adding file changes
40 adding file changes
39 added 1 changesets with 1 changes to 1 files
41 added 1 changesets with 1 changes to 1 files
40 updating to branch default
42 updating to branch default
41 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
43 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
42 $ cd b-pull
44 $ cd b-pull
43 $ hg verify
45 $ hg verify
44 checking changesets
46 checking changesets
45 checking manifests
47 checking manifests
46 crosschecking files in changesets and manifests
48 crosschecking files in changesets and manifests
47 checking files
49 checking files
48 1 files, 1 changesets, 1 total revisions
50 1 files, 1 changesets, 1 total revisions
49 $ cd ..
51 $ cd ..
50
52
51 host:port for proxy
53 host:port for proxy
52
54
53 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
55 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
54 requesting all changes
56 requesting all changes
55 adding changesets
57 adding changesets
56 adding manifests
58 adding manifests
57 adding file changes
59 adding file changes
58 added 1 changesets with 1 changes to 1 files
60 added 1 changesets with 1 changes to 1 files
59 updating to branch default
61 updating to branch default
60 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
61
63
62 proxy url with user name and password
64 proxy url with user name and password
63
65
64 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
66 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
65 requesting all changes
67 requesting all changes
66 adding changesets
68 adding changesets
67 adding manifests
69 adding manifests
68 adding file changes
70 adding file changes
69 added 1 changesets with 1 changes to 1 files
71 added 1 changesets with 1 changes to 1 files
70 updating to branch default
72 updating to branch default
71 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
73 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
72
74
73 url with user name and password
75 url with user name and password
74
76
75 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
77 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
76 requesting all changes
78 requesting all changes
77 adding changesets
79 adding changesets
78 adding manifests
80 adding manifests
79 adding file changes
81 adding file changes
80 added 1 changesets with 1 changes to 1 files
82 added 1 changesets with 1 changes to 1 files
81 updating to branch default
83 updating to branch default
82 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
84 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
83
85
84 bad host:port for proxy
86 bad host:port for proxy
85
87
86 $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
88 $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
87 abort: error: Connection refused
89 abort: error: Connection refused
88 [255]
90 [255]
89
91
90 do not use the proxy if it is in the no list
92 do not use the proxy if it is in the no list
91
93
92 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g
94 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g
93 requesting all changes
95 requesting all changes
94 adding changesets
96 adding changesets
95 adding manifests
97 adding manifests
96 adding file changes
98 adding file changes
97 added 1 changesets with 1 changes to 1 files
99 added 1 changesets with 1 changes to 1 files
98 updating to branch default
100 updating to branch default
99 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
101 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
100 $ cat proxy.log
102 $ cat proxy.log
101 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
103 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
102 * - - [*] "GET http://localhost:$HGPORT/?cmd=branchmap HTTP/1.1" - - (glob)
104 * - - [*] "GET http://localhost:$HGPORT/?cmd=branchmap HTTP/1.1" - - (glob)
103 * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - (glob)
105 * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - (glob)
106 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
107 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
108 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
104 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
109 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
105 *- - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
110 *- - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
106 *- - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
111 *- - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
107 *- - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
112 *- - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
108 *- - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
113 *- - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
109 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
114 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
110 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
115 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
111 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
116 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
112 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
117 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
113 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
118 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
114 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
119 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
115 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
120 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
116 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
121 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
117 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
122 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
118 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
123 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
119 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
124 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
120 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
125 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
121 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
126 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
122 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
127 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
123 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
128 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
@@ -1,306 +1,313 b''
1 #require serve
1 #require serve
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5 $ echo foo>foo
5 $ echo foo>foo
6 $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg
6 $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg
7 $ echo foo>foo.d/foo
7 $ echo foo>foo.d/foo
8 $ echo bar>foo.d/bAr.hg.d/BaR
8 $ echo bar>foo.d/bAr.hg.d/BaR
9 $ echo bar>foo.d/baR.d.hg/bAR
9 $ echo bar>foo.d/baR.d.hg/bAR
10 $ hg commit -A -m 1
10 $ hg commit -A -m 1
11 adding foo
11 adding foo
12 adding foo.d/bAr.hg.d/BaR
12 adding foo.d/bAr.hg.d/BaR
13 adding foo.d/baR.d.hg/bAR
13 adding foo.d/baR.d.hg/bAR
14 adding foo.d/foo
14 adding foo.d/foo
15 $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log
15 $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log
16 $ hg --config server.uncompressed=False serve -p $HGPORT1 -d --pid-file=../hg2.pid
16 $ hg --config server.uncompressed=False serve -p $HGPORT1 -d --pid-file=../hg2.pid
17
17
18 Test server address cannot be reused
18 Test server address cannot be reused
19
19
20 #if windows
20 #if windows
21 $ hg serve -p $HGPORT1 2>&1
21 $ hg serve -p $HGPORT1 2>&1
22 abort: cannot start server at ':$HGPORT1': * (glob)
22 abort: cannot start server at ':$HGPORT1': * (glob)
23 [255]
23 [255]
24 #else
24 #else
25 $ hg serve -p $HGPORT1 2>&1
25 $ hg serve -p $HGPORT1 2>&1
26 abort: cannot start server at ':$HGPORT1': Address already in use
26 abort: cannot start server at ':$HGPORT1': Address already in use
27 [255]
27 [255]
28 #endif
28 #endif
29 $ cd ..
29 $ cd ..
30 $ cat hg1.pid hg2.pid >> $DAEMON_PIDS
30 $ cat hg1.pid hg2.pid >> $DAEMON_PIDS
31
31
32 clone via stream
32 clone via stream
33
33
34 $ hg clone --uncompressed http://localhost:$HGPORT/ copy 2>&1
34 $ hg clone --uncompressed http://localhost:$HGPORT/ copy 2>&1
35 streaming all changes
35 streaming all changes
36 6 files to transfer, 606 bytes of data
36 6 files to transfer, 606 bytes of data
37 transferred * bytes in * seconds (*/sec) (glob)
37 transferred * bytes in * seconds (*/sec) (glob)
38 searching for changes
39 no changes found
38 updating to branch default
40 updating to branch default
39 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
41 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 $ hg verify -R copy
42 $ hg verify -R copy
41 checking changesets
43 checking changesets
42 checking manifests
44 checking manifests
43 crosschecking files in changesets and manifests
45 crosschecking files in changesets and manifests
44 checking files
46 checking files
45 4 files, 1 changesets, 4 total revisions
47 4 files, 1 changesets, 4 total revisions
46
48
47 try to clone via stream, should use pull instead
49 try to clone via stream, should use pull instead
48
50
49 $ hg clone --uncompressed http://localhost:$HGPORT1/ copy2
51 $ hg clone --uncompressed http://localhost:$HGPORT1/ copy2
50 requesting all changes
52 requesting all changes
51 adding changesets
53 adding changesets
52 adding manifests
54 adding manifests
53 adding file changes
55 adding file changes
54 added 1 changesets with 4 changes to 4 files
56 added 1 changesets with 4 changes to 4 files
55 updating to branch default
57 updating to branch default
56 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
58 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
57
59
58 clone via pull
60 clone via pull
59
61
60 $ hg clone http://localhost:$HGPORT1/ copy-pull
62 $ hg clone http://localhost:$HGPORT1/ copy-pull
61 requesting all changes
63 requesting all changes
62 adding changesets
64 adding changesets
63 adding manifests
65 adding manifests
64 adding file changes
66 adding file changes
65 added 1 changesets with 4 changes to 4 files
67 added 1 changesets with 4 changes to 4 files
66 updating to branch default
68 updating to branch default
67 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
69 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
68 $ hg verify -R copy-pull
70 $ hg verify -R copy-pull
69 checking changesets
71 checking changesets
70 checking manifests
72 checking manifests
71 crosschecking files in changesets and manifests
73 crosschecking files in changesets and manifests
72 checking files
74 checking files
73 4 files, 1 changesets, 4 total revisions
75 4 files, 1 changesets, 4 total revisions
74 $ cd test
76 $ cd test
75 $ echo bar > bar
77 $ echo bar > bar
76 $ hg commit -A -d '1 0' -m 2
78 $ hg commit -A -d '1 0' -m 2
77 adding bar
79 adding bar
78 $ cd ..
80 $ cd ..
79
81
80 clone over http with --update
82 clone over http with --update
81
83
82 $ hg clone http://localhost:$HGPORT1/ updated --update 0
84 $ hg clone http://localhost:$HGPORT1/ updated --update 0
83 requesting all changes
85 requesting all changes
84 adding changesets
86 adding changesets
85 adding manifests
87 adding manifests
86 adding file changes
88 adding file changes
87 added 2 changesets with 5 changes to 5 files
89 added 2 changesets with 5 changes to 5 files
88 updating to branch default
90 updating to branch default
89 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
90 $ hg log -r . -R updated
92 $ hg log -r . -R updated
91 changeset: 0:8b6053c928fe
93 changeset: 0:8b6053c928fe
92 user: test
94 user: test
93 date: Thu Jan 01 00:00:00 1970 +0000
95 date: Thu Jan 01 00:00:00 1970 +0000
94 summary: 1
96 summary: 1
95
97
96 $ rm -rf updated
98 $ rm -rf updated
97
99
98 incoming via HTTP
100 incoming via HTTP
99
101
100 $ hg clone http://localhost:$HGPORT1/ --rev 0 partial
102 $ hg clone http://localhost:$HGPORT1/ --rev 0 partial
101 adding changesets
103 adding changesets
102 adding manifests
104 adding manifests
103 adding file changes
105 adding file changes
104 added 1 changesets with 4 changes to 4 files
106 added 1 changesets with 4 changes to 4 files
105 updating to branch default
107 updating to branch default
106 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
108 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
107 $ cd partial
109 $ cd partial
108 $ touch LOCAL
110 $ touch LOCAL
109 $ hg ci -qAm LOCAL
111 $ hg ci -qAm LOCAL
110 $ hg incoming http://localhost:$HGPORT1/ --template '{desc}\n'
112 $ hg incoming http://localhost:$HGPORT1/ --template '{desc}\n'
111 comparing with http://localhost:$HGPORT1/
113 comparing with http://localhost:$HGPORT1/
112 searching for changes
114 searching for changes
113 2
115 2
114 $ cd ..
116 $ cd ..
115
117
116 pull
118 pull
117
119
118 $ cd copy-pull
120 $ cd copy-pull
119 $ echo '[hooks]' >> .hg/hgrc
121 $ echo '[hooks]' >> .hg/hgrc
120 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup" >> .hg/hgrc
122 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup" >> .hg/hgrc
121 $ hg pull
123 $ hg pull
122 pulling from http://localhost:$HGPORT1/
124 pulling from http://localhost:$HGPORT1/
123 searching for changes
125 searching for changes
124 adding changesets
126 adding changesets
125 adding manifests
127 adding manifests
126 adding file changes
128 adding file changes
127 added 1 changesets with 1 changes to 1 files
129 added 1 changesets with 1 changes to 1 files
128 changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_URL=http://localhost:$HGPORT1/
130 changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_URL=http://localhost:$HGPORT1/
129 (run 'hg update' to get a working copy)
131 (run 'hg update' to get a working copy)
130 $ cd ..
132 $ cd ..
131
133
132 clone from invalid URL
134 clone from invalid URL
133
135
134 $ hg clone http://localhost:$HGPORT/bad
136 $ hg clone http://localhost:$HGPORT/bad
135 abort: HTTP Error 404: Not Found
137 abort: HTTP Error 404: Not Found
136 [255]
138 [255]
137
139
138 test http authentication
140 test http authentication
139 + use the same server to test server side streaming preference
141 + use the same server to test server side streaming preference
140
142
141 $ cd test
143 $ cd test
142 $ cat << EOT > userpass.py
144 $ cat << EOT > userpass.py
143 > import base64
145 > import base64
144 > from mercurial.hgweb import common
146 > from mercurial.hgweb import common
145 > def perform_authentication(hgweb, req, op):
147 > def perform_authentication(hgweb, req, op):
146 > auth = req.env.get('HTTP_AUTHORIZATION')
148 > auth = req.env.get('HTTP_AUTHORIZATION')
147 > if not auth:
149 > if not auth:
148 > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, 'who',
150 > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, 'who',
149 > [('WWW-Authenticate', 'Basic Realm="mercurial"')])
151 > [('WWW-Authenticate', 'Basic Realm="mercurial"')])
150 > if base64.b64decode(auth.split()[1]).split(':', 1) != ['user', 'pass']:
152 > if base64.b64decode(auth.split()[1]).split(':', 1) != ['user', 'pass']:
151 > raise common.ErrorResponse(common.HTTP_FORBIDDEN, 'no')
153 > raise common.ErrorResponse(common.HTTP_FORBIDDEN, 'no')
152 > def extsetup():
154 > def extsetup():
153 > common.permhooks.insert(0, perform_authentication)
155 > common.permhooks.insert(0, perform_authentication)
154 > EOT
156 > EOT
155 $ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \
157 $ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \
156 > --config server.preferuncompressed=True \
158 > --config server.preferuncompressed=True \
157 > --config web.push_ssl=False --config web.allow_push=* -A ../access.log
159 > --config web.push_ssl=False --config web.allow_push=* -A ../access.log
158 $ cat pid >> $DAEMON_PIDS
160 $ cat pid >> $DAEMON_PIDS
159
161
160 $ cat << EOF > get_pass.py
162 $ cat << EOF > get_pass.py
161 > import getpass
163 > import getpass
162 > def newgetpass(arg):
164 > def newgetpass(arg):
163 > return "pass"
165 > return "pass"
164 > getpass.getpass = newgetpass
166 > getpass.getpass = newgetpass
165 > EOF
167 > EOF
166
168
167 #if python243
169 #if python243
168 $ hg id http://localhost:$HGPORT2/
170 $ hg id http://localhost:$HGPORT2/
169 abort: http authorization required for http://localhost:$HGPORT2/
171 abort: http authorization required for http://localhost:$HGPORT2/
170 [255]
172 [255]
171 $ hg id http://localhost:$HGPORT2/
173 $ hg id http://localhost:$HGPORT2/
172 abort: http authorization required for http://localhost:$HGPORT2/
174 abort: http authorization required for http://localhost:$HGPORT2/
173 [255]
175 [255]
174 $ hg id --config ui.interactive=true --config extensions.getpass=get_pass.py http://user@localhost:$HGPORT2/
176 $ hg id --config ui.interactive=true --config extensions.getpass=get_pass.py http://user@localhost:$HGPORT2/
175 http authorization required for http://localhost:$HGPORT2/
177 http authorization required for http://localhost:$HGPORT2/
176 realm: mercurial
178 realm: mercurial
177 user: user
179 user: user
178 password: 5fed3813f7f5
180 password: 5fed3813f7f5
179 $ hg id http://user:pass@localhost:$HGPORT2/
181 $ hg id http://user:pass@localhost:$HGPORT2/
180 5fed3813f7f5
182 5fed3813f7f5
181 #endif
183 #endif
182 $ echo '[auth]' >> .hg/hgrc
184 $ echo '[auth]' >> .hg/hgrc
183 $ echo 'l.schemes=http' >> .hg/hgrc
185 $ echo 'l.schemes=http' >> .hg/hgrc
184 $ echo 'l.prefix=lo' >> .hg/hgrc
186 $ echo 'l.prefix=lo' >> .hg/hgrc
185 $ echo 'l.username=user' >> .hg/hgrc
187 $ echo 'l.username=user' >> .hg/hgrc
186 $ echo 'l.password=pass' >> .hg/hgrc
188 $ echo 'l.password=pass' >> .hg/hgrc
187 $ hg id http://localhost:$HGPORT2/
189 $ hg id http://localhost:$HGPORT2/
188 5fed3813f7f5
190 5fed3813f7f5
189 $ hg id http://localhost:$HGPORT2/
191 $ hg id http://localhost:$HGPORT2/
190 5fed3813f7f5
192 5fed3813f7f5
191 $ hg id http://user@localhost:$HGPORT2/
193 $ hg id http://user@localhost:$HGPORT2/
192 5fed3813f7f5
194 5fed3813f7f5
193 #if python243
195 #if python243
194 $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1
196 $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1
195 streaming all changes
197 streaming all changes
196 7 files to transfer, 916 bytes of data
198 7 files to transfer, 916 bytes of data
197 transferred * bytes in * seconds (*/sec) (glob)
199 transferred * bytes in * seconds (*/sec) (glob)
200 searching for changes
201 no changes found
198 updating to branch default
202 updating to branch default
199 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
203 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
200
204
201 $ hg id http://user2@localhost:$HGPORT2/
205 $ hg id http://user2@localhost:$HGPORT2/
202 abort: http authorization required for http://localhost:$HGPORT2/
206 abort: http authorization required for http://localhost:$HGPORT2/
203 [255]
207 [255]
204 $ hg id http://user:pass2@localhost:$HGPORT2/
208 $ hg id http://user:pass2@localhost:$HGPORT2/
205 abort: HTTP Error 403: no
209 abort: HTTP Error 403: no
206 [255]
210 [255]
207
211
208 $ hg -R dest tag -r tip top
212 $ hg -R dest tag -r tip top
209 $ hg -R dest push http://user:pass@localhost:$HGPORT2/
213 $ hg -R dest push http://user:pass@localhost:$HGPORT2/
210 pushing to http://user:***@localhost:$HGPORT2/
214 pushing to http://user:***@localhost:$HGPORT2/
211 searching for changes
215 searching for changes
212 remote: adding changesets
216 remote: adding changesets
213 remote: adding manifests
217 remote: adding manifests
214 remote: adding file changes
218 remote: adding file changes
215 remote: added 1 changesets with 1 changes to 1 files
219 remote: added 1 changesets with 1 changes to 1 files
216 $ hg rollback -q
220 $ hg rollback -q
217
221
218 $ cut -c38- ../access.log
222 $ cut -c38- ../access.log
219 "GET /?cmd=capabilities HTTP/1.1" 200 -
223 "GET /?cmd=capabilities HTTP/1.1" 200 -
220 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
224 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
221 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
225 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
222 "GET /?cmd=capabilities HTTP/1.1" 200 -
226 "GET /?cmd=capabilities HTTP/1.1" 200 -
223 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
227 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
224 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
228 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
225 "GET /?cmd=capabilities HTTP/1.1" 200 -
229 "GET /?cmd=capabilities HTTP/1.1" 200 -
226 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
230 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
227 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
231 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
228 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
232 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
229 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
233 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
230 "GET /?cmd=capabilities HTTP/1.1" 200 -
234 "GET /?cmd=capabilities HTTP/1.1" 200 -
231 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
235 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
232 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
236 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
233 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
237 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
234 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
238 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
235 "GET /?cmd=capabilities HTTP/1.1" 200 -
239 "GET /?cmd=capabilities HTTP/1.1" 200 -
236 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
240 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
237 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
241 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
238 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
242 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
239 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
243 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
240 "GET /?cmd=capabilities HTTP/1.1" 200 -
244 "GET /?cmd=capabilities HTTP/1.1" 200 -
241 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
245 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
242 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
246 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
243 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
247 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
244 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
248 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
245 "GET /?cmd=capabilities HTTP/1.1" 200 -
249 "GET /?cmd=capabilities HTTP/1.1" 200 -
246 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
250 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
247 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
251 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
248 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
252 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
249 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
253 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
250 "GET /?cmd=capabilities HTTP/1.1" 200 -
254 "GET /?cmd=capabilities HTTP/1.1" 200 -
251 "GET /?cmd=branchmap HTTP/1.1" 200 -
255 "GET /?cmd=branchmap HTTP/1.1" 200 -
252 "GET /?cmd=stream_out HTTP/1.1" 401 -
256 "GET /?cmd=stream_out HTTP/1.1" 401 -
253 "GET /?cmd=stream_out HTTP/1.1" 200 -
257 "GET /?cmd=stream_out HTTP/1.1" 200 -
258 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
259 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d
260 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
254 "GET /?cmd=capabilities HTTP/1.1" 200 -
261 "GET /?cmd=capabilities HTTP/1.1" 200 -
255 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
262 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
256 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
263 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
257 "GET /?cmd=capabilities HTTP/1.1" 200 -
264 "GET /?cmd=capabilities HTTP/1.1" 200 -
258 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
265 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
259 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
266 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
260 "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces
267 "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces
261 "GET /?cmd=capabilities HTTP/1.1" 200 -
268 "GET /?cmd=capabilities HTTP/1.1" 200 -
262 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872
269 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872
263 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases
270 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases
264 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
271 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
265 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
272 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
266 "GET /?cmd=branchmap HTTP/1.1" 200 -
273 "GET /?cmd=branchmap HTTP/1.1" 200 -
267 "GET /?cmd=branchmap HTTP/1.1" 200 -
274 "GET /?cmd=branchmap HTTP/1.1" 200 -
268 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
275 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
269 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+5eb5abfefeea63c80dd7553bcc3783f37e0c5524
276 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+5eb5abfefeea63c80dd7553bcc3783f37e0c5524
270 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
277 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
271
278
272 #endif
279 #endif
273 $ cd ..
280 $ cd ..
274
281
275 clone of serve with repo in root and unserved subrepo (issue2970)
282 clone of serve with repo in root and unserved subrepo (issue2970)
276
283
277 $ hg --cwd test init sub
284 $ hg --cwd test init sub
278 $ echo empty > test/sub/empty
285 $ echo empty > test/sub/empty
279 $ hg --cwd test/sub add empty
286 $ hg --cwd test/sub add empty
280 $ hg --cwd test/sub commit -qm 'add empty'
287 $ hg --cwd test/sub commit -qm 'add empty'
281 $ hg --cwd test/sub tag -r 0 something
288 $ hg --cwd test/sub tag -r 0 something
282 $ echo sub = sub > test/.hgsub
289 $ echo sub = sub > test/.hgsub
283 $ hg --cwd test add .hgsub
290 $ hg --cwd test add .hgsub
284 $ hg --cwd test commit -qm 'add subrepo'
291 $ hg --cwd test commit -qm 'add subrepo'
285 $ hg clone http://localhost:$HGPORT noslash-clone
292 $ hg clone http://localhost:$HGPORT noslash-clone
286 requesting all changes
293 requesting all changes
287 adding changesets
294 adding changesets
288 adding manifests
295 adding manifests
289 adding file changes
296 adding file changes
290 added 3 changesets with 7 changes to 7 files
297 added 3 changesets with 7 changes to 7 files
291 updating to branch default
298 updating to branch default
292 abort: HTTP Error 404: Not Found
299 abort: HTTP Error 404: Not Found
293 [255]
300 [255]
294 $ hg clone http://localhost:$HGPORT/ slash-clone
301 $ hg clone http://localhost:$HGPORT/ slash-clone
295 requesting all changes
302 requesting all changes
296 adding changesets
303 adding changesets
297 adding manifests
304 adding manifests
298 adding file changes
305 adding file changes
299 added 3 changesets with 7 changes to 7 files
306 added 3 changesets with 7 changes to 7 files
300 updating to branch default
307 updating to branch default
301 abort: HTTP Error 404: Not Found
308 abort: HTTP Error 404: Not Found
302 [255]
309 [255]
303
310
304 check error log
311 check error log
305
312
306 $ cat error.log
313 $ cat error.log
@@ -1,431 +1,451 b''
1
1
2
2
3 This test tries to exercise the ssh functionality with a dummy script
3 This test tries to exercise the ssh functionality with a dummy script
4
4
5 creating 'remote' repo
5 creating 'remote' repo
6
6
7 $ hg init remote
7 $ hg init remote
8 $ cd remote
8 $ cd remote
9 $ echo this > foo
9 $ echo this > foo
10 $ echo this > fooO
10 $ echo this > fooO
11 $ hg ci -A -m "init" foo fooO
11 $ hg ci -A -m "init" foo fooO
12 $ cat <<EOF > .hg/hgrc
12 $ cat <<EOF > .hg/hgrc
13 > [server]
13 > [server]
14 > uncompressed = True
14 > uncompressed = True
15 >
15 >
16 > [hooks]
16 > [hooks]
17 > changegroup = python "$TESTDIR/printenv.py" changegroup-in-remote 0 ../dummylog
17 > changegroup = python "$TESTDIR/printenv.py" changegroup-in-remote 0 ../dummylog
18 > EOF
18 > EOF
19 $ cd ..
19 $ cd ..
20
20
21 repo not found error
21 repo not found error
22
22
23 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
23 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
24 remote: abort: there is no Mercurial repository here (.hg not found)!
24 remote: abort: there is no Mercurial repository here (.hg not found)!
25 abort: no suitable response from remote hg!
25 abort: no suitable response from remote hg!
26 [255]
26 [255]
27
27
28 non-existent absolute path
28 non-existent absolute path
29
29
30 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
30 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
31 remote: abort: there is no Mercurial repository here (.hg not found)!
31 remote: abort: there is no Mercurial repository here (.hg not found)!
32 abort: no suitable response from remote hg!
32 abort: no suitable response from remote hg!
33 [255]
33 [255]
34
34
35 clone remote via stream
35 clone remote via stream
36
36
37 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
37 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
38 streaming all changes
38 streaming all changes
39 4 files to transfer, 392 bytes of data
39 4 files to transfer, 392 bytes of data
40 transferred 392 bytes in * seconds (*/sec) (glob)
40 transferred 392 bytes in * seconds (*/sec) (glob)
41 searching for changes
42 no changes found
41 updating to branch default
43 updating to branch default
42 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
44 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
43 $ cd local-stream
45 $ cd local-stream
44 $ hg verify
46 $ hg verify
45 checking changesets
47 checking changesets
46 checking manifests
48 checking manifests
47 crosschecking files in changesets and manifests
49 crosschecking files in changesets and manifests
48 checking files
50 checking files
49 2 files, 1 changesets, 2 total revisions
51 2 files, 1 changesets, 2 total revisions
50 $ cd ..
52 $ cd ..
51
53
54 clone bookmarks via stream
55
56 $ hg -R local-stream book mybook
57 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
58 streaming all changes
59 4 files to transfer, 392 bytes of data
60 transferred 392 bytes in * seconds (* KB/sec) (glob)
61 searching for changes
62 no changes found
63 updating to branch default
64 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
65 $ cd stream2
66 $ hg book
67 mybook 0:1160648e36ce
68 $ cd ..
69 $ rm -rf local-stream stream2
70
52 clone remote via pull
71 clone remote via pull
53
72
54 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
73 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
55 requesting all changes
74 requesting all changes
56 adding changesets
75 adding changesets
57 adding manifests
76 adding manifests
58 adding file changes
77 adding file changes
59 added 1 changesets with 2 changes to 2 files
78 added 1 changesets with 2 changes to 2 files
60 updating to branch default
79 updating to branch default
61 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
80 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
62
81
63 verify
82 verify
64
83
65 $ cd local
84 $ cd local
66 $ hg verify
85 $ hg verify
67 checking changesets
86 checking changesets
68 checking manifests
87 checking manifests
69 crosschecking files in changesets and manifests
88 crosschecking files in changesets and manifests
70 checking files
89 checking files
71 2 files, 1 changesets, 2 total revisions
90 2 files, 1 changesets, 2 total revisions
72 $ echo '[hooks]' >> .hg/hgrc
91 $ echo '[hooks]' >> .hg/hgrc
73 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup-in-local 0 ../dummylog" >> .hg/hgrc
92 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup-in-local 0 ../dummylog" >> .hg/hgrc
74
93
75 empty default pull
94 empty default pull
76
95
77 $ hg paths
96 $ hg paths
78 default = ssh://user@dummy/remote
97 default = ssh://user@dummy/remote
79 $ hg pull -e "python \"$TESTDIR/dummyssh\""
98 $ hg pull -e "python \"$TESTDIR/dummyssh\""
80 pulling from ssh://user@dummy/remote
99 pulling from ssh://user@dummy/remote
81 searching for changes
100 searching for changes
82 no changes found
101 no changes found
83
102
84 local change
103 local change
85
104
86 $ echo bleah > foo
105 $ echo bleah > foo
87 $ hg ci -m "add"
106 $ hg ci -m "add"
88
107
89 updating rc
108 updating rc
90
109
91 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
110 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
92 $ echo "[ui]" >> .hg/hgrc
111 $ echo "[ui]" >> .hg/hgrc
93 $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
112 $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
94
113
95 find outgoing
114 find outgoing
96
115
97 $ hg out ssh://user@dummy/remote
116 $ hg out ssh://user@dummy/remote
98 comparing with ssh://user@dummy/remote
117 comparing with ssh://user@dummy/remote
99 searching for changes
118 searching for changes
100 changeset: 1:a28a9d1a809c
119 changeset: 1:a28a9d1a809c
101 tag: tip
120 tag: tip
102 user: test
121 user: test
103 date: Thu Jan 01 00:00:00 1970 +0000
122 date: Thu Jan 01 00:00:00 1970 +0000
104 summary: add
123 summary: add
105
124
106
125
107 find incoming on the remote side
126 find incoming on the remote side
108
127
109 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
128 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
110 comparing with ssh://user@dummy/local
129 comparing with ssh://user@dummy/local
111 searching for changes
130 searching for changes
112 changeset: 1:a28a9d1a809c
131 changeset: 1:a28a9d1a809c
113 tag: tip
132 tag: tip
114 user: test
133 user: test
115 date: Thu Jan 01 00:00:00 1970 +0000
134 date: Thu Jan 01 00:00:00 1970 +0000
116 summary: add
135 summary: add
117
136
118
137
119 find incoming on the remote side (using absolute path)
138 find incoming on the remote side (using absolute path)
120
139
121 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
140 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
122 comparing with ssh://user@dummy/$TESTTMP/local
141 comparing with ssh://user@dummy/$TESTTMP/local
123 searching for changes
142 searching for changes
124 changeset: 1:a28a9d1a809c
143 changeset: 1:a28a9d1a809c
125 tag: tip
144 tag: tip
126 user: test
145 user: test
127 date: Thu Jan 01 00:00:00 1970 +0000
146 date: Thu Jan 01 00:00:00 1970 +0000
128 summary: add
147 summary: add
129
148
130
149
131 push
150 push
132
151
133 $ hg push
152 $ hg push
134 pushing to ssh://user@dummy/remote
153 pushing to ssh://user@dummy/remote
135 searching for changes
154 searching for changes
136 remote: adding changesets
155 remote: adding changesets
137 remote: adding manifests
156 remote: adding manifests
138 remote: adding file changes
157 remote: adding file changes
139 remote: added 1 changesets with 1 changes to 1 files
158 remote: added 1 changesets with 1 changes to 1 files
140 $ cd ../remote
159 $ cd ../remote
141
160
142 check remote tip
161 check remote tip
143
162
144 $ hg tip
163 $ hg tip
145 changeset: 1:a28a9d1a809c
164 changeset: 1:a28a9d1a809c
146 tag: tip
165 tag: tip
147 user: test
166 user: test
148 date: Thu Jan 01 00:00:00 1970 +0000
167 date: Thu Jan 01 00:00:00 1970 +0000
149 summary: add
168 summary: add
150
169
151 $ hg verify
170 $ hg verify
152 checking changesets
171 checking changesets
153 checking manifests
172 checking manifests
154 crosschecking files in changesets and manifests
173 crosschecking files in changesets and manifests
155 checking files
174 checking files
156 2 files, 2 changesets, 3 total revisions
175 2 files, 2 changesets, 3 total revisions
157 $ hg cat -r tip foo
176 $ hg cat -r tip foo
158 bleah
177 bleah
159 $ echo z > z
178 $ echo z > z
160 $ hg ci -A -m z z
179 $ hg ci -A -m z z
161 created new head
180 created new head
162
181
163 test pushkeys and bookmarks
182 test pushkeys and bookmarks
164
183
165 $ cd ../local
184 $ cd ../local
166 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
185 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
167 bookmarks
186 bookmarks
168 namespaces
187 namespaces
169 phases
188 phases
170 $ hg book foo -r 0
189 $ hg book foo -r 0
171 $ hg out -B
190 $ hg out -B
172 comparing with ssh://user@dummy/remote
191 comparing with ssh://user@dummy/remote
173 searching for changed bookmarks
192 searching for changed bookmarks
174 foo 1160648e36ce
193 foo 1160648e36ce
175 $ hg push -B foo
194 $ hg push -B foo
176 pushing to ssh://user@dummy/remote
195 pushing to ssh://user@dummy/remote
177 searching for changes
196 searching for changes
178 no changes found
197 no changes found
179 exporting bookmark foo
198 exporting bookmark foo
180 [1]
199 [1]
181 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
200 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
182 foo 1160648e36cec0054048a7edc4110c6f84fde594
201 foo 1160648e36cec0054048a7edc4110c6f84fde594
183 $ hg book -f foo
202 $ hg book -f foo
184 $ hg push --traceback
203 $ hg push --traceback
185 pushing to ssh://user@dummy/remote
204 pushing to ssh://user@dummy/remote
186 searching for changes
205 searching for changes
187 no changes found
206 no changes found
188 updating bookmark foo
207 updating bookmark foo
189 [1]
208 [1]
190 $ hg book -d foo
209 $ hg book -d foo
191 $ hg in -B
210 $ hg in -B
192 comparing with ssh://user@dummy/remote
211 comparing with ssh://user@dummy/remote
193 searching for changed bookmarks
212 searching for changed bookmarks
194 foo a28a9d1a809c
213 foo a28a9d1a809c
195 $ hg book -f -r 0 foo
214 $ hg book -f -r 0 foo
196 $ hg pull -B foo
215 $ hg pull -B foo
197 pulling from ssh://user@dummy/remote
216 pulling from ssh://user@dummy/remote
198 no changes found
217 no changes found
199 updating bookmark foo
218 updating bookmark foo
200 $ hg book -d foo
219 $ hg book -d foo
201 $ hg push -B foo
220 $ hg push -B foo
202 pushing to ssh://user@dummy/remote
221 pushing to ssh://user@dummy/remote
203 searching for changes
222 searching for changes
204 no changes found
223 no changes found
205 deleting remote bookmark foo
224 deleting remote bookmark foo
206 [1]
225 [1]
207
226
208 a bad, evil hook that prints to stdout
227 a bad, evil hook that prints to stdout
209
228
210 $ cat <<EOF > $TESTTMP/badhook
229 $ cat <<EOF > $TESTTMP/badhook
211 > import sys
230 > import sys
212 > sys.stdout.write("KABOOM\n")
231 > sys.stdout.write("KABOOM\n")
213 > EOF
232 > EOF
214
233
215 $ echo '[hooks]' >> ../remote/.hg/hgrc
234 $ echo '[hooks]' >> ../remote/.hg/hgrc
216 $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
235 $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
217 $ echo r > r
236 $ echo r > r
218 $ hg ci -A -m z r
237 $ hg ci -A -m z r
219
238
220 push should succeed even though it has an unexpected response
239 push should succeed even though it has an unexpected response
221
240
222 $ hg push
241 $ hg push
223 pushing to ssh://user@dummy/remote
242 pushing to ssh://user@dummy/remote
224 searching for changes
243 searching for changes
225 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
244 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
226 remote: adding changesets
245 remote: adding changesets
227 remote: adding manifests
246 remote: adding manifests
228 remote: adding file changes
247 remote: adding file changes
229 remote: added 1 changesets with 1 changes to 1 files
248 remote: added 1 changesets with 1 changes to 1 files
230 remote: KABOOM
249 remote: KABOOM
231 $ hg -R ../remote heads
250 $ hg -R ../remote heads
232 changeset: 3:1383141674ec
251 changeset: 3:1383141674ec
233 tag: tip
252 tag: tip
234 parent: 1:a28a9d1a809c
253 parent: 1:a28a9d1a809c
235 user: test
254 user: test
236 date: Thu Jan 01 00:00:00 1970 +0000
255 date: Thu Jan 01 00:00:00 1970 +0000
237 summary: z
256 summary: z
238
257
239 changeset: 2:6c0482d977a3
258 changeset: 2:6c0482d977a3
240 parent: 0:1160648e36ce
259 parent: 0:1160648e36ce
241 user: test
260 user: test
242 date: Thu Jan 01 00:00:00 1970 +0000
261 date: Thu Jan 01 00:00:00 1970 +0000
243 summary: z
262 summary: z
244
263
245
264
246 clone bookmarks
265 clone bookmarks
247
266
248 $ hg -R ../remote bookmark test
267 $ hg -R ../remote bookmark test
249 $ hg -R ../remote bookmarks
268 $ hg -R ../remote bookmarks
250 * test 2:6c0482d977a3
269 * test 2:6c0482d977a3
251 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
270 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
252 requesting all changes
271 requesting all changes
253 adding changesets
272 adding changesets
254 adding manifests
273 adding manifests
255 adding file changes
274 adding file changes
256 added 4 changesets with 5 changes to 4 files (+1 heads)
275 added 4 changesets with 5 changes to 4 files (+1 heads)
257 updating to branch default
276 updating to branch default
258 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
277 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
259 $ hg -R local-bookmarks bookmarks
278 $ hg -R local-bookmarks bookmarks
260 test 2:6c0482d977a3
279 test 2:6c0482d977a3
261
280
262 passwords in ssh urls are not supported
281 passwords in ssh urls are not supported
263 (we use a glob here because different Python versions give different
282 (we use a glob here because different Python versions give different
264 results here)
283 results here)
265
284
266 $ hg push ssh://user:erroneouspwd@dummy/remote
285 $ hg push ssh://user:erroneouspwd@dummy/remote
267 pushing to ssh://user:*@dummy/remote (glob)
286 pushing to ssh://user:*@dummy/remote (glob)
268 abort: password in URL not supported!
287 abort: password in URL not supported!
269 [255]
288 [255]
270
289
271 $ cd ..
290 $ cd ..
272
291
273 hide outer repo
292 hide outer repo
274 $ hg init
293 $ hg init
275
294
276 Test remote paths with spaces (issue2983):
295 Test remote paths with spaces (issue2983):
277
296
278 $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
297 $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
279 $ touch "$TESTTMP/a repo/test"
298 $ touch "$TESTTMP/a repo/test"
280 $ hg -R 'a repo' commit -A -m "test"
299 $ hg -R 'a repo' commit -A -m "test"
  adding test
  $ hg -R 'a repo' tag tag
  $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
  73649e48688a

  $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
  abort: unknown revision 'noNoNO'!
  [255]

Test (non-)escaping of remote paths with spaces when cloning (issue3145):

  $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
  destination directory: a repo
  abort: destination 'a repo' is not empty
  [255]

Test hg-ssh using a helper script that will restore PYTHONPATH (which might
have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
parameters:

  $ cat > ssh.sh << EOF
  > userhost="\$1"
  > SSH_ORIGINAL_COMMAND="\$2"
  > export SSH_ORIGINAL_COMMAND
  > PYTHONPATH="$PYTHONPATH"
  > export PYTHONPATH
  > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
  > EOF

  $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
  73649e48688a

  $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
  remote: Illegal repository "$TESTTMP/a'repo" (glob)
  abort: no suitable response from remote hg!
  [255]

  $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
  remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
  abort: no suitable response from remote hg!
  [255]

  $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh"
  Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
  [255]

Test hg-ssh in read-only mode:

  $ cat > ssh.sh << EOF
  > userhost="\$1"
  > SSH_ORIGINAL_COMMAND="\$2"
  > export SSH_ORIGINAL_COMMAND
  > PYTHONPATH="$PYTHONPATH"
  > export PYTHONPATH
  > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
  > EOF

  $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 4 changesets with 5 changes to 4 files (+1 heads)
  updating to branch default
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ cd read-only-local
  $ echo "baz" > bar
  $ hg ci -A -m "unpushable commit" bar
  $ hg push --ssh "sh ../ssh.sh"
  pushing to ssh://user@dummy/*/remote (glob)
  searching for changes
  remote: Permission denied
  remote: abort: prechangegroup.hg-ssh hook failed
  remote: Permission denied
  remote: abort: prepushkey.hg-ssh hook failed
  abort: unexpected response: empty string
  [255]

  $ cd ..

stderr from remote commands should be printed before stdout from local code (issue4336)

  $ hg clone remote stderr-ordering
  updating to branch default
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd stderr-ordering
  $ cat >> localwrite.py << EOF
  > from mercurial import exchange, extensions
  >
  > def wrappedpush(orig, repo, *args, **kwargs):
  >     res = orig(repo, *args, **kwargs)
  >     repo.ui.write('local stdout\n')
  >     return res
  >
  > def extsetup(ui):
  >     extensions.wrapfunction(exchange, 'push', wrappedpush)
  > EOF

  $ cat >> .hg/hgrc << EOF
  > [paths]
  > default-push = ssh://user@dummy/remote
  > [ui]
  > ssh = python "$TESTDIR/dummyssh"
  > [extensions]
  > localwrite = localwrite.py
  > EOF

  $ echo localwrite > foo
  $ hg commit -m 'testing localwrite'
  $ hg push
  pushing to ssh://user@dummy/remote
  searching for changes
  remote: adding changesets
  remote: adding manifests
  remote: adding file changes
  remote: added 1 changesets with 1 changes to 1 files
  remote: KABOOM
  local stdout

  $ cd ..

  $ cat dummylog
  Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
  Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R local serve --stdio
  Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  Got arguments 1:user@dummy 2:hg init 'a repo'
  Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
  Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
  Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
  Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
  Got arguments 1:user@dummy 2:hg -R remote serve --stdio
  changegroup-in-remote hook: HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1