##// END OF EJS Templates
subrepo: extract preprocess of repo.commit() to free function...
Yuya Nishihara -
r35018:5c6b96b8 stable
parent child Browse files
Show More
@@ -1,2346 +1,2296
# localrepo.py - read/write repository class for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import hashlib
import inspect
import os
import random
import time
import weakref

from .i18n import _
from .node import (
    hex,
    nullid,
    short,
)
from . import (
    bookmarks,
    branchmap,
    bundle2,
    changegroup,
    changelog,
    color,
    context,
    dirstate,
    dirstateguard,
    discovery,
    encoding,
    error,
    exchange,
    extensions,
    filelog,
    hook,
    lock as lockmod,
    manifest,
    match as matchmod,
    merge as mergemod,
    mergeutil,
    namespaces,
    obsolete,
    pathutil,
    peer,
    phases,
    pushkey,
    pycompat,
    repository,
    repoview,
    revset,
    revsetlang,
    scmutil,
    sparse,
    store,
    subrepo,
    tags as tagsmod,
    transaction,
    txnutil,
    util,
    vfs as vfsmod,
)

# Convenience aliases used throughout this module.
release = lockmod.release
urlerr = util.urlerr
urlreq = util.urlreq
71
71
72 # set of (path, vfs-location) tuples. vfs-location is:
72 # set of (path, vfs-location) tuples. vfs-location is:
73 # - 'plain for vfs relative paths
73 # - 'plain for vfs relative paths
74 # - '' for svfs relative paths
74 # - '' for svfs relative paths
75 _cachedfiles = set()
75 _cachedfiles = set()
76
76
class _basefilecache(scmutil.filecache):
    """All filecache usage on repo are done for logic that should be unfiltered
    """
    def __get__(self, repo, type=None):
        if repo is None:
            return self
        # Always delegate to the unfiltered repo so every filtered view
        # shares the same cached value.
        return super(_basefilecache, self).__get__(repo.unfiltered(), type)
    def __set__(self, repo, value):
        return super(_basefilecache, self).__set__(repo.unfiltered(), value)
    def __delete__(self, repo):
        return super(_basefilecache, self).__delete__(repo.unfiltered())
88
88
class repofilecache(_basefilecache):
    """filecache for files in .hg but outside of .hg/store"""
    def __init__(self, *paths):
        super(repofilecache, self).__init__(*paths)
        for path in paths:
            # register each watched path as vfs-relative ('plain')
            _cachedfiles.add((path, 'plain'))

    def join(self, obj, fname):
        return obj.vfs.join(fname)
98
98
class storecache(_basefilecache):
    """filecache for files in the store"""
    def __init__(self, *paths):
        super(storecache, self).__init__(*paths)
        for path in paths:
            # register each watched path as svfs-relative ('')
            _cachedfiles.add((path, ''))

    def join(self, obj, fname):
        return obj.sjoin(fname)
108
108
def isfilecached(repo, name):
    """check if a repo has already cached "name" filecache-ed property

    This returns (cachedobj-or-None, iscached) tuple.
    """
    cacheentry = repo.unfiltered()._filecache.get(name, None)
    if not cacheentry:
        return None, False
    return cacheentry.obj, True
118
118
class unfilteredpropertycache(util.propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            # computing on the unfiltered repo itself: cache normally
            return super(unfilteredpropertycache, self).__get__(unfi)
        # filtered view: read the value through the unfiltered repo
        return getattr(unfi, self.name)
127
127
class filteredpropertycache(util.propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        # store on the (possibly filtered) repo instance itself
        object.__setattr__(obj, self.name, value)
133
133
134
134
def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    return name in vars(repo.unfiltered())
138
138
def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""
    def wrapper(repo, *args, **kwargs):
        return orig(repo.unfiltered(), *args, **kwargs)
    return wrapper
144
144
# Wire-protocol capabilities advertised by local peers.
moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
              'unbundle'}
legacycaps = moderncaps.union({'changegroupsubset'})
148
148
class localpeer(repository.peer):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=None):
        super(localpeer, self).__init__()

        if caps is None:
            caps = moderncaps.copy()
        # serve the 'served' filtered view, not the raw repo
        self._repo = repo.filtered('served')
        self._ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)

    # Begin of _basepeer interface.

    @util.propertycache
    def ui(self):
        return self._ui

    def url(self):
        return self._repo.url()

    def local(self):
        return self._repo

    def peer(self):
        return self

    def canpush(self):
        return True

    def close(self):
        self._repo.close()

    # End of _basepeer interface.

    # Begin of _basewirecommands interface.

    def branchmap(self):
        return self._repo.branchmap()

    def capabilities(self):
        return self._caps

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        """Used to test argument passing over the wire"""
        return "%s %s %s %s %s" % (one, two, three, four, five)

    def getbundle(self, source, heads=None, common=None, bundlecaps=None,
                  **kwargs):
        chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
                                          common=common,
                                          bundlecaps=bundlecaps, **kwargs)
        cb = util.chunkbuffer(chunks)

        if exchange.bundle2requested(bundlecaps):
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            return bundle2.getunbundler(self.ui, cb)
        else:
            return changegroup.getunbundler('01', cb, None)

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def lookup(self, key):
        return self._repo.lookup(key)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def stream_out(self):
        raise error.Abort(_('cannot perform stream clone against local '
                            'peer'))

    def unbundle(self, cg, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            try:
                cg = exchange.readbundle(self.ui, cg, None)
                ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
                if util.safehasattr(ret, 'getchunks'):
                    # This is a bundle20 object, turn it into an unbundler.
                    # This little dance should be dropped eventually when the
                    # API is finally improved.
                    stream = util.chunkbuffer(ret.getchunks())
                    ret = bundle2.getunbundler(self.ui, stream)
                return ret
            except Exception as exc:
                # If the exception contains output salvaged from a bundle2
                # reply, we need to make sure it is printed before continuing
                # to fail. So we build a bundle2 with such output and consume
                # it directly.
                #
                # This is not very elegant but allows a "simple" solution for
                # issue4594
                output = getattr(exc, '_bundle2salvagedoutput', ())
                if output:
                    bundler = bundle2.bundle20(self._repo.ui)
                    for out in output:
                        bundler.addpart(out)
                    stream = util.chunkbuffer(bundler.getchunks())
                    b = bundle2.getunbundler(self.ui, stream)
                    bundle2.processbundle(self._repo, b)
                raise
        except error.PushRaced as exc:
            raise error.ResponseError(_('push failed:'), str(exc))

    # End of _basewirecommands interface.

    # Begin of peer interface.

    def iterbatch(self):
        return peer.localiterbatcher(self)

    # End of peer interface.
273
273
class locallegacypeer(repository.legacypeer, localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        super(locallegacypeer, self).__init__(repo, caps=legacycaps)

    # Begin of baselegacywirecommands interface.

    def between(self, pairs):
        return self._repo.between(pairs)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def changegroup(self, basenodes, source):
        outgoing = discovery.outgoing(self._repo, missingroots=basenodes,
                                      missingheads=self._repo.heads())
        return changegroup.makechangegroup(self._repo, outgoing, '01', source)

    def changegroupsubset(self, bases, heads, source):
        outgoing = discovery.outgoing(self._repo, missingroots=bases,
                                      missingheads=heads)
        return changegroup.makechangegroup(self._repo, outgoing, '01', source)

    # End of baselegacywirecommands interface.
300
300
# Increment the sub-version when the revlog v2 format changes to lock out old
# clients.
REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
304
304
class localrepository(object):

    supportedformats = {
        'revlogv1',
        'generaldelta',
        'treemanifest',
        'manifestv2',
        REVLOGV2_REQUIREMENT,
    }
    _basesupported = supportedformats | {
        'store',
        'fncache',
        'shared',
        'relshared',
        'dotencode',
        'exp-sparse',
    }
    openerreqs = {
        'revlogv1',
        'generaldelta',
        'treemanifest',
        'manifestv2',
    }

    # a list of (ui, featureset) functions.
    # only functions defined in module of enabled extensions are invoked
    featuresetupfuncs = set()

    # list of prefix for file which can be written without 'wlock'
    # Extensions should extend this list when needed
    _wlockfreeprefix = {
        # We might consider requiring 'wlock' for the next
        # two, but pretty much all the existing code assume
        # wlock is not needed so we keep them excluded for
        # now.
        'hgrc',
        'requires',
        # XXX cache is a complicated business someone
        # should investigate this in depth at some point
        'cache/',
        # XXX shouldn't be dirstate covered by the wlock?
        'dirstate',
        # XXX bisect was still a bit too messy at the time
        # this changeset was introduced. Someone should fix
        # the remaining bit and drop this line
        'bisect.state',
    }
352
352
353 def __init__(self, baseui, path, create=False):
353 def __init__(self, baseui, path, create=False):
354 self.requirements = set()
354 self.requirements = set()
355 self.filtername = None
355 self.filtername = None
356 # wvfs: rooted at the repository root, used to access the working copy
356 # wvfs: rooted at the repository root, used to access the working copy
357 self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
357 self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
358 # vfs: rooted at .hg, used to access repo files outside of .hg/store
358 # vfs: rooted at .hg, used to access repo files outside of .hg/store
359 self.vfs = None
359 self.vfs = None
360 # svfs: usually rooted at .hg/store, used to access repository history
360 # svfs: usually rooted at .hg/store, used to access repository history
361 # If this is a shared repository, this vfs may point to another
361 # If this is a shared repository, this vfs may point to another
362 # repository's .hg/store directory.
362 # repository's .hg/store directory.
363 self.svfs = None
363 self.svfs = None
364 self.root = self.wvfs.base
364 self.root = self.wvfs.base
365 self.path = self.wvfs.join(".hg")
365 self.path = self.wvfs.join(".hg")
366 self.origroot = path
366 self.origroot = path
367 # These auditor are not used by the vfs,
367 # These auditor are not used by the vfs,
368 # only used when writing this comment: basectx.match
368 # only used when writing this comment: basectx.match
369 self.auditor = pathutil.pathauditor(self.root, self._checknested)
369 self.auditor = pathutil.pathauditor(self.root, self._checknested)
370 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
370 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
371 realfs=False, cached=True)
371 realfs=False, cached=True)
372 self.baseui = baseui
372 self.baseui = baseui
373 self.ui = baseui.copy()
373 self.ui = baseui.copy()
374 self.ui.copy = baseui.copy # prevent copying repo configuration
374 self.ui.copy = baseui.copy # prevent copying repo configuration
375 self.vfs = vfsmod.vfs(self.path, cacheaudited=True)
375 self.vfs = vfsmod.vfs(self.path, cacheaudited=True)
376 if (self.ui.configbool('devel', 'all-warnings') or
376 if (self.ui.configbool('devel', 'all-warnings') or
377 self.ui.configbool('devel', 'check-locks')):
377 self.ui.configbool('devel', 'check-locks')):
378 self.vfs.audit = self._getvfsward(self.vfs.audit)
378 self.vfs.audit = self._getvfsward(self.vfs.audit)
379 # A list of callback to shape the phase if no data were found.
379 # A list of callback to shape the phase if no data were found.
380 # Callback are in the form: func(repo, roots) --> processed root.
380 # Callback are in the form: func(repo, roots) --> processed root.
381 # This list it to be filled by extension during repo setup
381 # This list it to be filled by extension during repo setup
382 self._phasedefaults = []
382 self._phasedefaults = []
383 try:
383 try:
384 self.ui.readconfig(self.vfs.join("hgrc"), self.root)
384 self.ui.readconfig(self.vfs.join("hgrc"), self.root)
385 self._loadextensions()
385 self._loadextensions()
386 except IOError:
386 except IOError:
387 pass
387 pass
388
388
389 if self.featuresetupfuncs:
389 if self.featuresetupfuncs:
390 self.supported = set(self._basesupported) # use private copy
390 self.supported = set(self._basesupported) # use private copy
391 extmods = set(m.__name__ for n, m
391 extmods = set(m.__name__ for n, m
392 in extensions.extensions(self.ui))
392 in extensions.extensions(self.ui))
393 for setupfunc in self.featuresetupfuncs:
393 for setupfunc in self.featuresetupfuncs:
394 if setupfunc.__module__ in extmods:
394 if setupfunc.__module__ in extmods:
395 setupfunc(self.ui, self.supported)
395 setupfunc(self.ui, self.supported)
396 else:
396 else:
397 self.supported = self._basesupported
397 self.supported = self._basesupported
398 color.setup(self.ui)
398 color.setup(self.ui)
399
399
400 # Add compression engines.
400 # Add compression engines.
401 for name in util.compengines:
401 for name in util.compengines:
402 engine = util.compengines[name]
402 engine = util.compengines[name]
403 if engine.revlogheader():
403 if engine.revlogheader():
404 self.supported.add('exp-compression-%s' % name)
404 self.supported.add('exp-compression-%s' % name)
405
405
406 if not self.vfs.isdir():
406 if not self.vfs.isdir():
407 if create:
407 if create:
408 self.requirements = newreporequirements(self)
408 self.requirements = newreporequirements(self)
409
409
410 if not self.wvfs.exists():
410 if not self.wvfs.exists():
411 self.wvfs.makedirs()
411 self.wvfs.makedirs()
412 self.vfs.makedir(notindexed=True)
412 self.vfs.makedir(notindexed=True)
413
413
414 if 'store' in self.requirements:
414 if 'store' in self.requirements:
415 self.vfs.mkdir("store")
415 self.vfs.mkdir("store")
416
416
417 # create an invalid changelog
417 # create an invalid changelog
418 self.vfs.append(
418 self.vfs.append(
419 "00changelog.i",
419 "00changelog.i",
420 '\0\0\0\2' # represents revlogv2
420 '\0\0\0\2' # represents revlogv2
421 ' dummy changelog to prevent using the old repo layout'
421 ' dummy changelog to prevent using the old repo layout'
422 )
422 )
423 else:
423 else:
424 raise error.RepoError(_("repository %s not found") % path)
424 raise error.RepoError(_("repository %s not found") % path)
425 elif create:
425 elif create:
426 raise error.RepoError(_("repository %s already exists") % path)
426 raise error.RepoError(_("repository %s already exists") % path)
427 else:
427 else:
428 try:
428 try:
429 self.requirements = scmutil.readrequires(
429 self.requirements = scmutil.readrequires(
430 self.vfs, self.supported)
430 self.vfs, self.supported)
431 except IOError as inst:
431 except IOError as inst:
432 if inst.errno != errno.ENOENT:
432 if inst.errno != errno.ENOENT:
433 raise
433 raise
434
434
435 cachepath = self.vfs.join('cache')
435 cachepath = self.vfs.join('cache')
436 self.sharedpath = self.path
436 self.sharedpath = self.path
437 try:
437 try:
438 sharedpath = self.vfs.read("sharedpath").rstrip('\n')
438 sharedpath = self.vfs.read("sharedpath").rstrip('\n')
439 if 'relshared' in self.requirements:
439 if 'relshared' in self.requirements:
440 sharedpath = self.vfs.join(sharedpath)
440 sharedpath = self.vfs.join(sharedpath)
441 vfs = vfsmod.vfs(sharedpath, realpath=True)
441 vfs = vfsmod.vfs(sharedpath, realpath=True)
442 cachepath = vfs.join('cache')
442 cachepath = vfs.join('cache')
443 s = vfs.base
443 s = vfs.base
444 if not vfs.exists():
444 if not vfs.exists():
445 raise error.RepoError(
445 raise error.RepoError(
446 _('.hg/sharedpath points to nonexistent directory %s') % s)
446 _('.hg/sharedpath points to nonexistent directory %s') % s)
447 self.sharedpath = s
447 self.sharedpath = s
448 except IOError as inst:
448 except IOError as inst:
449 if inst.errno != errno.ENOENT:
449 if inst.errno != errno.ENOENT:
450 raise
450 raise
451
451
452 if 'exp-sparse' in self.requirements and not sparse.enabled:
452 if 'exp-sparse' in self.requirements and not sparse.enabled:
453 raise error.RepoError(_('repository is using sparse feature but '
453 raise error.RepoError(_('repository is using sparse feature but '
454 'sparse is not enabled; enable the '
454 'sparse is not enabled; enable the '
455 '"sparse" extensions to access'))
455 '"sparse" extensions to access'))
456
456
457 self.store = store.store(
457 self.store = store.store(
458 self.requirements, self.sharedpath,
458 self.requirements, self.sharedpath,
459 lambda base: vfsmod.vfs(base, cacheaudited=True))
459 lambda base: vfsmod.vfs(base, cacheaudited=True))
460 self.spath = self.store.path
460 self.spath = self.store.path
461 self.svfs = self.store.vfs
461 self.svfs = self.store.vfs
462 self.sjoin = self.store.join
462 self.sjoin = self.store.join
463 self.vfs.createmode = self.store.createmode
463 self.vfs.createmode = self.store.createmode
464 self.cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
464 self.cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
465 self.cachevfs.createmode = self.store.createmode
465 self.cachevfs.createmode = self.store.createmode
466 if (self.ui.configbool('devel', 'all-warnings') or
466 if (self.ui.configbool('devel', 'all-warnings') or
467 self.ui.configbool('devel', 'check-locks')):
467 self.ui.configbool('devel', 'check-locks')):
468 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
468 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
469 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
469 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
470 else: # standard vfs
470 else: # standard vfs
471 self.svfs.audit = self._getsvfsward(self.svfs.audit)
471 self.svfs.audit = self._getsvfsward(self.svfs.audit)
472 self._applyopenerreqs()
472 self._applyopenerreqs()
473 if create:
473 if create:
474 self._writerequirements()
474 self._writerequirements()
475
475
476 self._dirstatevalidatewarned = False
476 self._dirstatevalidatewarned = False
477
477
478 self._branchcaches = {}
478 self._branchcaches = {}
479 self._revbranchcache = None
479 self._revbranchcache = None
480 self.filterpats = {}
480 self.filterpats = {}
481 self._datafilters = {}
481 self._datafilters = {}
482 self._transref = self._lockref = self._wlockref = None
482 self._transref = self._lockref = self._wlockref = None
483
483
484 # A cache for various files under .hg/ that tracks file changes,
484 # A cache for various files under .hg/ that tracks file changes,
485 # (used by the filecache decorator)
485 # (used by the filecache decorator)
486 #
486 #
487 # Maps a property name to its util.filecacheentry
487 # Maps a property name to its util.filecacheentry
488 self._filecache = {}
488 self._filecache = {}
489
489
490 # hold sets of revision to be filtered
490 # hold sets of revision to be filtered
491 # should be cleared when something might have changed the filter value:
491 # should be cleared when something might have changed the filter value:
492 # - new changesets,
492 # - new changesets,
493 # - phase change,
493 # - phase change,
494 # - new obsolescence marker,
494 # - new obsolescence marker,
495 # - working directory parent change,
495 # - working directory parent change,
496 # - bookmark changes
496 # - bookmark changes
497 self.filteredrevcache = {}
497 self.filteredrevcache = {}
498
498
499 # post-dirstate-status hooks
499 # post-dirstate-status hooks
500 self._postdsstatus = []
500 self._postdsstatus = []
501
501
502 # Cache of types representing filtered repos.
502 # Cache of types representing filtered repos.
503 self._filteredrepotypes = weakref.WeakKeyDictionary()
503 self._filteredrepotypes = weakref.WeakKeyDictionary()
504
504
505 # generic mapping between names and nodes
505 # generic mapping between names and nodes
506 self.names = namespaces.namespaces()
506 self.names = namespaces.namespaces()
507
507
508 # Key to signature value.
508 # Key to signature value.
509 self._sparsesignaturecache = {}
509 self._sparsesignaturecache = {}
510 # Signature to cached matcher instance.
510 # Signature to cached matcher instance.
511 self._sparsematchercache = {}
511 self._sparsematchercache = {}
512
512
def _getvfsward(self, origfunc):
    """build a ward for self.vfs

    The returned wrapper forwards to the wrapped opener and, in
    developer mode, warns about writes performed while the relevant
    lock is not held.
    """
    rref = weakref.ref(self)
    def checkvfs(path, mode=None):
        result = origfunc(path, mode=mode)
        repo = rref()
        usable = (repo is not None
                  and util.safehasattr(repo, '_wlockref')
                  and util.safehasattr(repo, '_lockref'))
        if not usable:
            return
        if mode in (None, 'r', 'rb'):
            # read-only access never requires a lock
            return
        if path.startswith(repo.path):
            # truncate name relative to the repository (.hg)
            path = path[len(repo.path) + 1:]
        if path.startswith('cache/'):
            msg = 'accessing cache with vfs instead of cachevfs: "%s"'
            repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
        if path.startswith('journal.'):
            # journal is covered by 'lock'
            if repo._currentlock(repo._lockref) is None:
                repo.ui.develwarn('write with no lock: "%s"' % path,
                                  stacklevel=2, config='check-locks')
        elif repo._currentlock(repo._wlockref) is None:
            # rest of vfs files are covered by 'wlock'
            #
            # exclude special files
            for prefix in self._wlockfreeprefix:
                if path.startswith(prefix):
                    return
            repo.ui.develwarn('write with no wlock: "%s"' % path,
                              stacklevel=2, config='check-locks')
        return result
    return checkvfs
547
547
def _getsvfsward(self, origfunc):
    """build a ward for self.svfs

    Wraps the store opener so that store writes done without the
    store lock raise a developer warning.
    """
    rref = weakref.ref(self)
    def checksvfs(path, mode=None):
        result = origfunc(path, mode=mode)
        repo = rref()
        if repo is None or not util.safehasattr(repo, '_lockref'):
            return
        if mode in (None, 'r', 'rb'):
            # reads are always lock-free
            return
        if path.startswith(repo.sharedpath):
            # truncate name relative to the repository (.hg)
            path = path[len(repo.sharedpath) + 1:]
        if repo._currentlock(repo._lockref) is None:
            repo.ui.develwarn('write with no lock: "%s"' % path,
                              stacklevel=3)
        return result
    return checksvfs
566
566
def close(self):
    """Release the repository, flushing pending cache writes."""
    self._writecaches()
569
569
def _loadextensions(self):
    """Load all enabled extensions into this repository's ui."""
    extensions.loadall(self.ui)
572
572
def _writecaches(self):
    """Persist in-memory caches worth keeping on disk."""
    rbc = self._revbranchcache
    if rbc:
        rbc.write()
576
576
def _restrictcapabilities(self, caps):
    """Adjust the set of advertised peer capabilities.

    When bundle2 advertising is enabled, a quoted bundle2
    capability blob is added to a copy of ``caps``.
    """
    if not self.ui.configbool('experimental', 'bundle2-advertise'):
        return caps
    caps = set(caps)
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
    caps.add('bundle2=' + urlreq.quote(capsblob))
    return caps
583
583
def _applyopenerreqs(self):
    """Derive self.svfs.options from requirements and config knobs."""
    self.svfs.options = dict((r, 1) for r in self.requirements
                             if r in self.openerreqs)
    opts = self.svfs.options
    # experimental config: format.chunkcachesize
    chunkcachesize = self.ui.configint('format', 'chunkcachesize')
    if chunkcachesize is not None:
        opts['chunkcachesize'] = chunkcachesize
    # experimental config: format.maxchainlen
    maxchainlen = self.ui.configint('format', 'maxchainlen')
    if maxchainlen is not None:
        opts['maxchainlen'] = maxchainlen
    # experimental config: format.manifestcachesize
    manifestcachesize = self.ui.configint('format', 'manifestcachesize')
    if manifestcachesize is not None:
        opts['manifestcachesize'] = manifestcachesize
    # experimental config: format.aggressivemergedeltas
    opts['aggressivemergedeltas'] = self.ui.configbool(
        'format', 'aggressivemergedeltas')
    opts['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
    chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan')
    if 0 <= chainspan:
        opts['maxdeltachainspan'] = chainspan
    mmapindexthreshold = self.ui.configbytes('experimental',
                                             'mmapindexthreshold')
    if mmapindexthreshold is not None:
        opts['mmapindexthreshold'] = mmapindexthreshold
    # sparse-read tuning knobs (experimental)
    opts['with-sparse-read'] = self.ui.configbool(
        'experimental', 'sparse-read')
    opts['sparse-read-density-threshold'] = float(self.ui.config(
        'experimental', 'sparse-read.density-threshold'))
    opts['sparse-read-min-gap-size'] = self.ui.configbytes(
        'experimental', 'sparse-read.min-gap-size')

    for r in self.requirements:
        if r.startswith('exp-compression-'):
            opts['compengine'] = r[len('exp-compression-'):]

    # TODO move "revlogv2" to openerreqs once finalized.
    if REVLOGV2_REQUIREMENT in self.requirements:
        opts['revlogv2'] = True
627
627
def _writerequirements(self):
    """Write the current requirements set out through self.vfs."""
    scmutil.writerequires(self.vfs, self.requirements)
630
630
def _checknested(self, path):
    """Determine if path is a legal nested repository."""
    if not path.startswith(self.root):
        return False
    subpath = path[len(self.root) + 1:]
    normsubpath = util.pconvert(subpath)

    # XXX: Checking against the current working copy is wrong in
    # the sense that it can reject things like
    #
    #   $ hg cat -r 10 sub/x.txt
    #
    # if sub/ is no longer a subrepository in the working copy
    # parent revision.
    #
    # However, it can of course also allow things that would have
    # been rejected before, such as the above cat command if sub/
    # is a subrepository now, but was a normal directory before.
    # The old path auditor would have rejected by mistake since it
    # panics when it sees sub/.hg/.
    #
    # All in all, checking against the working copy seems sensible
    # since we want to prevent access to nested repositories on
    # the filesystem *now*.
    wctx = self[None]
    components = util.splitpath(subpath)
    while components:
        prefix = '/'.join(components)
        if prefix not in wctx.substate:
            # not a subrepo boundary yet; walk up one component
            components.pop()
            continue
        if prefix == normsubpath:
            return True
        # delegate the remainder of the path to the nested subrepo
        sub = wctx.sub(prefix)
        return sub.checknested(subpath[len(prefix) + 1:])
    return False
668
668
def peer(self):
    """Return a fresh local peer for this repository.

    Deliberately not cached, to avoid a reference cycle.
    """
    return localpeer(self)
671
671
def unfiltered(self):
    """Return unfiltered version of the repository

    Intended to be overwritten by filtered repo."""
    # the base class is already unfiltered
    return self
677
677
def filtered(self, name):
    """Return a filtered version of a repository"""
    # Python <3.4 easily leaks types via __mro__. See
    # https://bugs.python.org/issue17950. We cache dynamically
    # created types so this method doesn't leak on every invocation.
    key = self.unfiltered().__class__
    cls = self._filteredrepotypes.get(key)
    if cls is None:
        # Build a new type mixing repoview into this repo's class,
        # named after the filter to aid debugging.
        bases = (repoview.repoview, key)
        cls = type(r'%sfilteredrepo' % name, bases, {})
        self._filteredrepotypes[key] = cls
    return cls(self, name)
695
695
@repofilecache('bookmarks', 'bookmarks.current')
def _bookmarks(self):
    """In-memory bookmark store, reloaded when bookmark files change."""
    return bookmarks.bmstore(self)
699
699
@property
def _activebookmark(self):
    """The active bookmark, as tracked by the bookmark store."""
    return self._bookmarks.active
703
703
# _phaserevs and _phasesets depend on changelog. what we need is to
# call _phasecache.invalidate() if '00changelog.i' was changed, but it
# can't be easily expressed in filecache mechanism.
@storecache('phaseroots', '00changelog.i')
def _phasecache(self):
    """Phase information cache keyed on phaseroots and the changelog."""
    return phases.phasecache(self, self._phasedefaults)
710
710
@storecache('obsstore')
def obsstore(self):
    """Obsolescence marker store for this repository."""
    return obsolete.makestore(self.ui, self)
714
714
@storecache('00changelog.i')
def changelog(self):
    """The changelog revlog, honoring pending transaction data."""
    trypending = txnutil.mayhavepending(self.root)
    return changelog.changelog(self.svfs, trypending=trypending)
719
719
def _constructmanifest(self):
    """Create the manifest revlog.

    Temporary hook during the manifest -> manifestlog migration; it
    lets bundlerepo and unionrepo intercept manifest creation.
    """
    return manifest.manifestrevlog(self.svfs)
725
725
@storecache('00manifest.i')
def manifestlog(self):
    """Collection-level access to manifests."""
    return manifest.manifestlog(self.svfs, self)
729
729
@repofilecache('dirstate')
def dirstate(self):
    """The working directory dirstate, with sparse-aware matching."""
    def sparsematchfn():
        return sparse.matcher(self)

    return dirstate.dirstate(self.vfs, self.ui, self.root,
                             self._dirstatevalidate, sparsematchfn)
736
736
def _dirstatevalidate(self, node):
    """Return node if it exists in the changelog, nullid otherwise.

    Warns (once per repo instance) about an unknown working parent.
    """
    try:
        self.changelog.rev(node)
    except error.LookupError:
        if not self._dirstatevalidatewarned:
            self._dirstatevalidatewarned = True
            self.ui.warn(_("warning: ignoring unknown"
                           " working parent %s!\n") % short(node))
        return nullid
    return node
747
747
def __getitem__(self, changeid):
    """Map a changeid (None, slice, rev or node) to a context object."""
    if changeid is None:
        return context.workingctx(self)
    if isinstance(changeid, slice):
        # wdirrev isn't contiguous so the slice shouldn't include it
        filtered = self.changelog.filteredrevs
        return [context.changectx(self, rev)
                for rev in xrange(*changeid.indices(len(self)))
                if rev not in filtered]
    try:
        return context.changectx(self, changeid)
    except error.WdirUnsupported:
        return context.workingctx(self)
760
760
def __contains__(self, changeid):
    """True if the given changeid exists

    error.LookupError is raised if an ambiguous node specified.
    """
    try:
        self[changeid]
    except error.RepoLookupError:
        return False
    return True
771
771
def __nonzero__(self):
    # a repository object is always truthy, even when empty
    return True

# Python 3 spelling of the truthiness hook
__bool__ = __nonzero__
776
776
def __len__(self):
    """Number of revisions in the (possibly filtered) changelog."""
    return len(self.changelog)
779
779
def __iter__(self):
    """Iterate over revision numbers of the changelog."""
    return iter(self.changelog)
782
782
def revs(self, expr, *args):
    '''Find revisions matching a revset.

    The revset is specified as a string ``expr`` that may contain
    %-formatting to escape certain types. See ``revsetlang.formatspec``.

    Revset aliases from the configuration are not expanded. To expand
    user aliases, consider calling ``scmutil.revrange()`` or
    ``repo.anyrevs([expr], user=True)``.

    Returns a revset.abstractsmartset, which is a list-like interface
    that contains integer revisions.
    '''
    spec = revsetlang.formatspec(expr, *args)
    matcher = revset.match(None, spec)
    return matcher(self)
799
799
def set(self, expr, *args):
    '''Find revisions matching a revset and emit changectx instances.

    This is a convenience wrapper around ``revs()`` that iterates the
    result and is a generator of changectx instances.

    Revset aliases from the configuration are not expanded. To expand
    user aliases, consider calling ``scmutil.revrange()``.
    '''
    for rev in self.revs(expr, *args):
        yield self[rev]
811
811
def anyrevs(self, specs, user=False, localalias=None):
    '''Find revisions matching one of the given revsets.

    Revset aliases from the configuration are not expanded by default. To
    expand user aliases, specify ``user=True``. To provide some local
    definitions overriding user aliases, set ``localalias`` to
    ``{name: definitionstring}``.
    '''
    if user:
        matcher = revset.matchany(self.ui, specs, repo=self,
                                  localalias=localalias)
    else:
        matcher = revset.matchany(None, specs, localalias=localalias)
    return matcher(self)
826
826
def url(self):
    """URL of this repository, using the 'file:' scheme."""
    return 'file:' + self.root
829
829
def hook(self, name, throw=False, **args):
    """Call a hook, passing this repo instance.

    This a convenience method to aid invoking hooks. Extensions likely
    won't call this unless they have registered a custom hook or are
    replacing code that is expected to call a hook.
    """
    return hook.hook(self.ui, self, name, throw, **args)
838
838
@filteredpropertycache
def _tagscache(self):
    '''Returns a tagscache object that contains various tags related
    caches.'''
    # One decorated function owns the cache; the other tag-related
    # methods simply read attributes off the returned object.
    class tagscache(object):
        def __init__(self):
            # tags maps tag name to node; tagtypes maps tag name to
            # 'global' or 'local'. (Global tags are defined by .hgtags
            # across all heads, and local tags are defined in
            # .hg/localtags.) They constitute the in-memory cache of
            # tags.
            self.tags = self.tagtypes = None
            # derived caches, filled lazily by tagslist()/nodetags()
            self.nodetagscache = self.tagslist = None

    result = tagscache()
    result.tags, result.tagtypes = self._findtags()
    return result
861
861
def tags(self):
    '''return a mapping of tag to node'''
    if self.changelog.filteredrevs:
        # a filtered view may hide some cached tags; recompute
        tags, tt = self._findtags()
    else:
        tags = self._tagscache.tags
    result = {}
    for name, node in tags.iteritems():
        try:
            # ignore tags to unknown nodes
            self.changelog.rev(node)
        except (error.LookupError, ValueError):
            continue
        result[name] = node
    return result
877
877
def _findtags(self):
    '''Do the hard work of finding tags.  Return a pair of dicts
    (tags, tagtypes) where tags maps tag name to node, and tagtypes
    maps tag name to a string like \'global\' or \'local\'.
    Subclasses or extensions are free to add their own tags, but
    should be aware that the returned dicts will be retained for the
    duration of the localrepo object.'''

    # XXX what tagtype should subclasses/extensions use? Currently
    # mq and bookmarks add tags, but do not set the tagtype at all.
    # Should each extension invent its own tag type? Should there
    # be one tagtype for all such "virtual" tags? Or is the status
    # quo fine?

    # map tag name to (node, hist)
    alltags = tagsmod.findglobaltags(self.ui, self)
    # map tag name to tag type
    tagtypes = dict((tag, 'global') for tag in alltags)

    tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

    # Build the return dicts. Have to re-encode tag names because
    # the tags module always uses UTF-8 (in order not to lose info
    # writing to the cache), but the rest of Mercurial wants them in
    # local encoding.
    tags = {}
    for name, (node, hist) in alltags.iteritems():
        if node != nullid:
            tags[encoding.tolocal(name)] = node
    tags['tip'] = self.changelog.tip()
    tagtypes = dict((encoding.tolocal(name), value)
                    for name, value in tagtypes.iteritems())
    return (tags, tagtypes)
912
912
def tagtype(self, tagname):
    '''
    return the type of the given tag. result can be:

    'local'  : a local tag
    'global' : a global tag
    None     : tag does not exist
    '''
    return self._tagscache.tagtypes.get(tagname)
923
923
def tagslist(self):
    '''return a list of tags ordered by revision'''
    cache = self._tagscache
    if not cache.tagslist:
        rev = self.changelog.rev
        entries = sorted((rev(n), t, n)
                         for t, n in self.tags().iteritems())
        cache.tagslist = [(t, n) for r, t, n in entries]
    return cache.tagslist
933
933
def nodetags(self, node):
    '''return the tags associated with a node'''
    if not self._tagscache.nodetagscache:
        bynode = {}
        for t, n in self._tagscache.tags.iteritems():
            bynode.setdefault(n, []).append(t)
        for tags in bynode.itervalues():
            tags.sort()
        self._tagscache.nodetagscache = bynode
    return self._tagscache.nodetagscache.get(node, [])
944
944
def nodebookmarks(self, node):
    """Return the sorted list of bookmarks pointing to ``node``.

    Replaces the manual append loop with a sorted comprehension
    (same order, same contents).
    """
    return sorted(bookmark
                  for bookmark, n in self._bookmarks.iteritems()
                  if n == node)
952
952
def branchmap(self):
    '''returns a dictionary {branch: [branchheads]} with branchheads
    ordered by increasing revision number'''
    # refresh the per-filter cache before handing it out
    branchmap.updatecache(self)
    return self._branchcaches[self.filtername]
958
958
@unfilteredmethod
def revbranchcache(self):
    """Return the rev -> branch cache, creating it on first use."""
    if not self._revbranchcache:
        # the cache is shared across all filtered views
        self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
    return self._revbranchcache
964
964
def branchtip(self, branch, ignoremissing=False):
    '''return the tip node for a given branch

    If ignoremissing is True, then this method will not raise an error.
    This is helpful for callers that only expect None for a missing branch
    (e.g. namespace).

    '''
    try:
        return self.branchmap().branchtip(branch)
    except KeyError:
        if not ignoremissing:
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
        # missing branch tolerated: fall through and return None
        # (the redundant ``else: pass`` branch was removed)
980
980
def lookup(self, key):
    """Resolve ``key`` to the node of the matching changeset."""
    return self[key].node()
983
983
def lookupbranch(self, key, remote=None):
    """Return the branch name ``key`` refers to.

    If ``key`` names a branch in the (remote or local) branchmap it is
    returned as-is; otherwise ``key`` is resolved to a changeset and
    that changeset's branch name is returned.
    """
    repo = remote or self
    if key in repo.branchmap():
        return key

    # Fixed idiom: the fragile ``(a and b) and a or c`` construct is
    # replaced by an equivalent conditional expression.
    repo = remote if remote and remote.local() else self
    return repo[key].branch()
991
991
def known(self, nodes):
    """Report, for each node, whether it is known and visible.

    Returns a list of booleans parallel to ``nodes``; a node counts as
    known only if it maps to a revision that is not filtered out.
    The manual append loop was replaced by a comprehension with the
    De Morgan-simplified condition.
    """
    cl = self.changelog
    nm = cl.nodemap
    filtered = cl.filteredrevs
    revs = (nm.get(n) for n in nodes)
    return [r is not None and r not in filtered for r in revs]
1002
1002
1003 def local(self):
1003 def local(self):
1004 return self
1004 return self
1005
1005
1006 def publishing(self):
1006 def publishing(self):
1007 # it's safe (and desirable) to trust the publish flag unconditionally
1007 # it's safe (and desirable) to trust the publish flag unconditionally
1008 # so that we don't finalize changes shared between users via ssh or nfs
1008 # so that we don't finalize changes shared between users via ssh or nfs
1009 return self.ui.configbool('phases', 'publish', untrusted=True)
1009 return self.ui.configbool('phases', 'publish', untrusted=True)
1010
1010
1011 def cancopy(self):
1011 def cancopy(self):
1012 # so statichttprepo's override of local() works
1012 # so statichttprepo's override of local() works
1013 if not self.local():
1013 if not self.local():
1014 return False
1014 return False
1015 if not self.publishing():
1015 if not self.publishing():
1016 return True
1016 return True
1017 # if publishing we can't copy if there is filtered content
1017 # if publishing we can't copy if there is filtered content
1018 return not self.filtered('visible').changelog.filteredrevs
1018 return not self.filtered('visible').changelog.filteredrevs
1019
1019
1020 def shared(self):
1020 def shared(self):
1021 '''the type of shared repository (None if not shared)'''
1021 '''the type of shared repository (None if not shared)'''
1022 if self.sharedpath != self.path:
1022 if self.sharedpath != self.path:
1023 return 'store'
1023 return 'store'
1024 return None
1024 return None
1025
1025
1026 def wjoin(self, f, *insidef):
1026 def wjoin(self, f, *insidef):
1027 return self.vfs.reljoin(self.root, f, *insidef)
1027 return self.vfs.reljoin(self.root, f, *insidef)
1028
1028
1029 def file(self, f):
1029 def file(self, f):
1030 if f[0] == '/':
1030 if f[0] == '/':
1031 f = f[1:]
1031 f = f[1:]
1032 return filelog.filelog(self.svfs, f)
1032 return filelog.filelog(self.svfs, f)
1033
1033
1034 def changectx(self, changeid):
1034 def changectx(self, changeid):
1035 return self[changeid]
1035 return self[changeid]
1036
1036
1037 def setparents(self, p1, p2=nullid):
1037 def setparents(self, p1, p2=nullid):
1038 with self.dirstate.parentchange():
1038 with self.dirstate.parentchange():
1039 copies = self.dirstate.setparents(p1, p2)
1039 copies = self.dirstate.setparents(p1, p2)
1040 pctx = self[p1]
1040 pctx = self[p1]
1041 if copies:
1041 if copies:
1042 # Adjust copy records, the dirstate cannot do it, it
1042 # Adjust copy records, the dirstate cannot do it, it
1043 # requires access to parents manifests. Preserve them
1043 # requires access to parents manifests. Preserve them
1044 # only for entries added to first parent.
1044 # only for entries added to first parent.
1045 for f in copies:
1045 for f in copies:
1046 if f not in pctx and copies[f] in pctx:
1046 if f not in pctx and copies[f] in pctx:
1047 self.dirstate.copy(copies[f], f)
1047 self.dirstate.copy(copies[f], f)
1048 if p2 == nullid:
1048 if p2 == nullid:
1049 for f, s in sorted(self.dirstate.copies().items()):
1049 for f, s in sorted(self.dirstate.copies().items()):
1050 if f not in pctx and s not in pctx:
1050 if f not in pctx and s not in pctx:
1051 self.dirstate.copy(None, f)
1051 self.dirstate.copy(None, f)
1052
1052
1053 def filectx(self, path, changeid=None, fileid=None):
1053 def filectx(self, path, changeid=None, fileid=None):
1054 """changeid can be a changeset revision, node, or tag.
1054 """changeid can be a changeset revision, node, or tag.
1055 fileid can be a file revision or node."""
1055 fileid can be a file revision or node."""
1056 return context.filectx(self, path, changeid, fileid)
1056 return context.filectx(self, path, changeid, fileid)
1057
1057
1058 def getcwd(self):
1058 def getcwd(self):
1059 return self.dirstate.getcwd()
1059 return self.dirstate.getcwd()
1060
1060
1061 def pathto(self, f, cwd=None):
1061 def pathto(self, f, cwd=None):
1062 return self.dirstate.pathto(f, cwd)
1062 return self.dirstate.pathto(f, cwd)
1063
1063
1064 def _loadfilter(self, filter):
1064 def _loadfilter(self, filter):
1065 if filter not in self.filterpats:
1065 if filter not in self.filterpats:
1066 l = []
1066 l = []
1067 for pat, cmd in self.ui.configitems(filter):
1067 for pat, cmd in self.ui.configitems(filter):
1068 if cmd == '!':
1068 if cmd == '!':
1069 continue
1069 continue
1070 mf = matchmod.match(self.root, '', [pat])
1070 mf = matchmod.match(self.root, '', [pat])
1071 fn = None
1071 fn = None
1072 params = cmd
1072 params = cmd
1073 for name, filterfn in self._datafilters.iteritems():
1073 for name, filterfn in self._datafilters.iteritems():
1074 if cmd.startswith(name):
1074 if cmd.startswith(name):
1075 fn = filterfn
1075 fn = filterfn
1076 params = cmd[len(name):].lstrip()
1076 params = cmd[len(name):].lstrip()
1077 break
1077 break
1078 if not fn:
1078 if not fn:
1079 fn = lambda s, c, **kwargs: util.filter(s, c)
1079 fn = lambda s, c, **kwargs: util.filter(s, c)
1080 # Wrap old filters not supporting keyword arguments
1080 # Wrap old filters not supporting keyword arguments
1081 if not inspect.getargspec(fn)[2]:
1081 if not inspect.getargspec(fn)[2]:
1082 oldfn = fn
1082 oldfn = fn
1083 fn = lambda s, c, **kwargs: oldfn(s, c)
1083 fn = lambda s, c, **kwargs: oldfn(s, c)
1084 l.append((mf, fn, params))
1084 l.append((mf, fn, params))
1085 self.filterpats[filter] = l
1085 self.filterpats[filter] = l
1086 return self.filterpats[filter]
1086 return self.filterpats[filter]
1087
1087
1088 def _filter(self, filterpats, filename, data):
1088 def _filter(self, filterpats, filename, data):
1089 for mf, fn, cmd in filterpats:
1089 for mf, fn, cmd in filterpats:
1090 if mf(filename):
1090 if mf(filename):
1091 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1091 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1092 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1092 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1093 break
1093 break
1094
1094
1095 return data
1095 return data
1096
1096
1097 @unfilteredpropertycache
1097 @unfilteredpropertycache
1098 def _encodefilterpats(self):
1098 def _encodefilterpats(self):
1099 return self._loadfilter('encode')
1099 return self._loadfilter('encode')
1100
1100
1101 @unfilteredpropertycache
1101 @unfilteredpropertycache
1102 def _decodefilterpats(self):
1102 def _decodefilterpats(self):
1103 return self._loadfilter('decode')
1103 return self._loadfilter('decode')
1104
1104
1105 def adddatafilter(self, name, filter):
1105 def adddatafilter(self, name, filter):
1106 self._datafilters[name] = filter
1106 self._datafilters[name] = filter
1107
1107
1108 def wread(self, filename):
1108 def wread(self, filename):
1109 if self.wvfs.islink(filename):
1109 if self.wvfs.islink(filename):
1110 data = self.wvfs.readlink(filename)
1110 data = self.wvfs.readlink(filename)
1111 else:
1111 else:
1112 data = self.wvfs.read(filename)
1112 data = self.wvfs.read(filename)
1113 return self._filter(self._encodefilterpats, filename, data)
1113 return self._filter(self._encodefilterpats, filename, data)
1114
1114
1115 def wwrite(self, filename, data, flags, backgroundclose=False):
1115 def wwrite(self, filename, data, flags, backgroundclose=False):
1116 """write ``data`` into ``filename`` in the working directory
1116 """write ``data`` into ``filename`` in the working directory
1117
1117
1118 This returns length of written (maybe decoded) data.
1118 This returns length of written (maybe decoded) data.
1119 """
1119 """
1120 data = self._filter(self._decodefilterpats, filename, data)
1120 data = self._filter(self._decodefilterpats, filename, data)
1121 if 'l' in flags:
1121 if 'l' in flags:
1122 self.wvfs.symlink(data, filename)
1122 self.wvfs.symlink(data, filename)
1123 else:
1123 else:
1124 self.wvfs.write(filename, data, backgroundclose=backgroundclose)
1124 self.wvfs.write(filename, data, backgroundclose=backgroundclose)
1125 if 'x' in flags:
1125 if 'x' in flags:
1126 self.wvfs.setflags(filename, False, True)
1126 self.wvfs.setflags(filename, False, True)
1127 return len(data)
1127 return len(data)
1128
1128
1129 def wwritedata(self, filename, data):
1129 def wwritedata(self, filename, data):
1130 return self._filter(self._decodefilterpats, filename, data)
1130 return self._filter(self._decodefilterpats, filename, data)
1131
1131
1132 def currenttransaction(self):
1132 def currenttransaction(self):
1133 """return the current transaction or None if non exists"""
1133 """return the current transaction or None if non exists"""
1134 if self._transref:
1134 if self._transref:
1135 tr = self._transref()
1135 tr = self._transref()
1136 else:
1136 else:
1137 tr = None
1137 tr = None
1138
1138
1139 if tr and tr.running():
1139 if tr and tr.running():
1140 return tr
1140 return tr
1141 return None
1141 return None
1142
1142
1143 def transaction(self, desc, report=None):
1143 def transaction(self, desc, report=None):
1144 if (self.ui.configbool('devel', 'all-warnings')
1144 if (self.ui.configbool('devel', 'all-warnings')
1145 or self.ui.configbool('devel', 'check-locks')):
1145 or self.ui.configbool('devel', 'check-locks')):
1146 if self._currentlock(self._lockref) is None:
1146 if self._currentlock(self._lockref) is None:
1147 raise error.ProgrammingError('transaction requires locking')
1147 raise error.ProgrammingError('transaction requires locking')
1148 tr = self.currenttransaction()
1148 tr = self.currenttransaction()
1149 if tr is not None:
1149 if tr is not None:
1150 scmutil.registersummarycallback(self, tr, desc)
1150 scmutil.registersummarycallback(self, tr, desc)
1151 return tr.nest()
1151 return tr.nest()
1152
1152
1153 # abort here if the journal already exists
1153 # abort here if the journal already exists
1154 if self.svfs.exists("journal"):
1154 if self.svfs.exists("journal"):
1155 raise error.RepoError(
1155 raise error.RepoError(
1156 _("abandoned transaction found"),
1156 _("abandoned transaction found"),
1157 hint=_("run 'hg recover' to clean up transaction"))
1157 hint=_("run 'hg recover' to clean up transaction"))
1158
1158
1159 idbase = "%.40f#%f" % (random.random(), time.time())
1159 idbase = "%.40f#%f" % (random.random(), time.time())
1160 ha = hex(hashlib.sha1(idbase).digest())
1160 ha = hex(hashlib.sha1(idbase).digest())
1161 txnid = 'TXN:' + ha
1161 txnid = 'TXN:' + ha
1162 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1162 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1163
1163
1164 self._writejournal(desc)
1164 self._writejournal(desc)
1165 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1165 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1166 if report:
1166 if report:
1167 rp = report
1167 rp = report
1168 else:
1168 else:
1169 rp = self.ui.warn
1169 rp = self.ui.warn
1170 vfsmap = {'plain': self.vfs} # root of .hg/
1170 vfsmap = {'plain': self.vfs} # root of .hg/
1171 # we must avoid cyclic reference between repo and transaction.
1171 # we must avoid cyclic reference between repo and transaction.
1172 reporef = weakref.ref(self)
1172 reporef = weakref.ref(self)
1173 # Code to track tag movement
1173 # Code to track tag movement
1174 #
1174 #
1175 # Since tags are all handled as file content, it is actually quite hard
1175 # Since tags are all handled as file content, it is actually quite hard
1176 # to track these movement from a code perspective. So we fallback to a
1176 # to track these movement from a code perspective. So we fallback to a
1177 # tracking at the repository level. One could envision to track changes
1177 # tracking at the repository level. One could envision to track changes
1178 # to the '.hgtags' file through changegroup apply but that fails to
1178 # to the '.hgtags' file through changegroup apply but that fails to
1179 # cope with case where transaction expose new heads without changegroup
1179 # cope with case where transaction expose new heads without changegroup
1180 # being involved (eg: phase movement).
1180 # being involved (eg: phase movement).
1181 #
1181 #
1182 # For now, We gate the feature behind a flag since this likely comes
1182 # For now, We gate the feature behind a flag since this likely comes
1183 # with performance impacts. The current code run more often than needed
1183 # with performance impacts. The current code run more often than needed
1184 # and do not use caches as much as it could. The current focus is on
1184 # and do not use caches as much as it could. The current focus is on
1185 # the behavior of the feature so we disable it by default. The flag
1185 # the behavior of the feature so we disable it by default. The flag
1186 # will be removed when we are happy with the performance impact.
1186 # will be removed when we are happy with the performance impact.
1187 #
1187 #
1188 # Once this feature is no longer experimental move the following
1188 # Once this feature is no longer experimental move the following
1189 # documentation to the appropriate help section:
1189 # documentation to the appropriate help section:
1190 #
1190 #
1191 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1191 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1192 # tags (new or changed or deleted tags). In addition the details of
1192 # tags (new or changed or deleted tags). In addition the details of
1193 # these changes are made available in a file at:
1193 # these changes are made available in a file at:
1194 # ``REPOROOT/.hg/changes/tags.changes``.
1194 # ``REPOROOT/.hg/changes/tags.changes``.
1195 # Make sure you check for HG_TAG_MOVED before reading that file as it
1195 # Make sure you check for HG_TAG_MOVED before reading that file as it
1196 # might exist from a previous transaction even if no tag were touched
1196 # might exist from a previous transaction even if no tag were touched
1197 # in this one. Changes are recorded in a line base format::
1197 # in this one. Changes are recorded in a line base format::
1198 #
1198 #
1199 # <action> <hex-node> <tag-name>\n
1199 # <action> <hex-node> <tag-name>\n
1200 #
1200 #
1201 # Actions are defined as follow:
1201 # Actions are defined as follow:
1202 # "-R": tag is removed,
1202 # "-R": tag is removed,
1203 # "+A": tag is added,
1203 # "+A": tag is added,
1204 # "-M": tag is moved (old value),
1204 # "-M": tag is moved (old value),
1205 # "+M": tag is moved (new value),
1205 # "+M": tag is moved (new value),
1206 tracktags = lambda x: None
1206 tracktags = lambda x: None
1207 # experimental config: experimental.hook-track-tags
1207 # experimental config: experimental.hook-track-tags
1208 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1208 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1209 if desc != 'strip' and shouldtracktags:
1209 if desc != 'strip' and shouldtracktags:
1210 oldheads = self.changelog.headrevs()
1210 oldheads = self.changelog.headrevs()
1211 def tracktags(tr2):
1211 def tracktags(tr2):
1212 repo = reporef()
1212 repo = reporef()
1213 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1213 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1214 newheads = repo.changelog.headrevs()
1214 newheads = repo.changelog.headrevs()
1215 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1215 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1216 # notes: we compare lists here.
1216 # notes: we compare lists here.
1217 # As we do it only once buiding set would not be cheaper
1217 # As we do it only once buiding set would not be cheaper
1218 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1218 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1219 if changes:
1219 if changes:
1220 tr2.hookargs['tag_moved'] = '1'
1220 tr2.hookargs['tag_moved'] = '1'
1221 with repo.vfs('changes/tags.changes', 'w',
1221 with repo.vfs('changes/tags.changes', 'w',
1222 atomictemp=True) as changesfile:
1222 atomictemp=True) as changesfile:
1223 # note: we do not register the file to the transaction
1223 # note: we do not register the file to the transaction
1224 # because we needs it to still exist on the transaction
1224 # because we needs it to still exist on the transaction
1225 # is close (for txnclose hooks)
1225 # is close (for txnclose hooks)
1226 tagsmod.writediff(changesfile, changes)
1226 tagsmod.writediff(changesfile, changes)
1227 def validate(tr2):
1227 def validate(tr2):
1228 """will run pre-closing hooks"""
1228 """will run pre-closing hooks"""
1229 # XXX the transaction API is a bit lacking here so we take a hacky
1229 # XXX the transaction API is a bit lacking here so we take a hacky
1230 # path for now
1230 # path for now
1231 #
1231 #
1232 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
1232 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
1233 # dict is copied before these run. In addition we needs the data
1233 # dict is copied before these run. In addition we needs the data
1234 # available to in memory hooks too.
1234 # available to in memory hooks too.
1235 #
1235 #
1236 # Moreover, we also need to make sure this runs before txnclose
1236 # Moreover, we also need to make sure this runs before txnclose
1237 # hooks and there is no "pending" mechanism that would execute
1237 # hooks and there is no "pending" mechanism that would execute
1238 # logic only if hooks are about to run.
1238 # logic only if hooks are about to run.
1239 #
1239 #
1240 # Fixing this limitation of the transaction is also needed to track
1240 # Fixing this limitation of the transaction is also needed to track
1241 # other families of changes (bookmarks, phases, obsolescence).
1241 # other families of changes (bookmarks, phases, obsolescence).
1242 #
1242 #
1243 # This will have to be fixed before we remove the experimental
1243 # This will have to be fixed before we remove the experimental
1244 # gating.
1244 # gating.
1245 tracktags(tr2)
1245 tracktags(tr2)
1246 repo = reporef()
1246 repo = reporef()
1247 if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
1247 if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
1248 for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
1248 for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
1249 args = tr.hookargs.copy()
1249 args = tr.hookargs.copy()
1250 args.update(bookmarks.preparehookargs(name, old, new))
1250 args.update(bookmarks.preparehookargs(name, old, new))
1251 repo.hook('pretxnclose-bookmark', throw=True,
1251 repo.hook('pretxnclose-bookmark', throw=True,
1252 txnname=desc,
1252 txnname=desc,
1253 **pycompat.strkwargs(args))
1253 **pycompat.strkwargs(args))
1254 if hook.hashook(repo.ui, 'pretxnclose-phase'):
1254 if hook.hashook(repo.ui, 'pretxnclose-phase'):
1255 cl = repo.unfiltered().changelog
1255 cl = repo.unfiltered().changelog
1256 for rev, (old, new) in tr.changes['phases'].items():
1256 for rev, (old, new) in tr.changes['phases'].items():
1257 args = tr.hookargs.copy()
1257 args = tr.hookargs.copy()
1258 node = hex(cl.node(rev))
1258 node = hex(cl.node(rev))
1259 args.update(phases.preparehookargs(node, old, new))
1259 args.update(phases.preparehookargs(node, old, new))
1260 repo.hook('pretxnclose-phase', throw=True, txnname=desc,
1260 repo.hook('pretxnclose-phase', throw=True, txnname=desc,
1261 **pycompat.strkwargs(args))
1261 **pycompat.strkwargs(args))
1262
1262
1263 repo.hook('pretxnclose', throw=True,
1263 repo.hook('pretxnclose', throw=True,
1264 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1264 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1265 def releasefn(tr, success):
1265 def releasefn(tr, success):
1266 repo = reporef()
1266 repo = reporef()
1267 if success:
1267 if success:
1268 # this should be explicitly invoked here, because
1268 # this should be explicitly invoked here, because
1269 # in-memory changes aren't written out at closing
1269 # in-memory changes aren't written out at closing
1270 # transaction, if tr.addfilegenerator (via
1270 # transaction, if tr.addfilegenerator (via
1271 # dirstate.write or so) isn't invoked while
1271 # dirstate.write or so) isn't invoked while
1272 # transaction running
1272 # transaction running
1273 repo.dirstate.write(None)
1273 repo.dirstate.write(None)
1274 else:
1274 else:
1275 # discard all changes (including ones already written
1275 # discard all changes (including ones already written
1276 # out) in this transaction
1276 # out) in this transaction
1277 repo.dirstate.restorebackup(None, 'journal.dirstate')
1277 repo.dirstate.restorebackup(None, 'journal.dirstate')
1278
1278
1279 repo.invalidate(clearfilecache=True)
1279 repo.invalidate(clearfilecache=True)
1280
1280
1281 tr = transaction.transaction(rp, self.svfs, vfsmap,
1281 tr = transaction.transaction(rp, self.svfs, vfsmap,
1282 "journal",
1282 "journal",
1283 "undo",
1283 "undo",
1284 aftertrans(renames),
1284 aftertrans(renames),
1285 self.store.createmode,
1285 self.store.createmode,
1286 validator=validate,
1286 validator=validate,
1287 releasefn=releasefn,
1287 releasefn=releasefn,
1288 checkambigfiles=_cachedfiles)
1288 checkambigfiles=_cachedfiles)
1289 tr.changes['revs'] = set()
1289 tr.changes['revs'] = set()
1290 tr.changes['obsmarkers'] = set()
1290 tr.changes['obsmarkers'] = set()
1291 tr.changes['phases'] = {}
1291 tr.changes['phases'] = {}
1292 tr.changes['bookmarks'] = {}
1292 tr.changes['bookmarks'] = {}
1293
1293
1294 tr.hookargs['txnid'] = txnid
1294 tr.hookargs['txnid'] = txnid
1295 # note: writing the fncache only during finalize mean that the file is
1295 # note: writing the fncache only during finalize mean that the file is
1296 # outdated when running hooks. As fncache is used for streaming clone,
1296 # outdated when running hooks. As fncache is used for streaming clone,
1297 # this is not expected to break anything that happen during the hooks.
1297 # this is not expected to break anything that happen during the hooks.
1298 tr.addfinalize('flush-fncache', self.store.write)
1298 tr.addfinalize('flush-fncache', self.store.write)
1299 def txnclosehook(tr2):
1299 def txnclosehook(tr2):
1300 """To be run if transaction is successful, will schedule a hook run
1300 """To be run if transaction is successful, will schedule a hook run
1301 """
1301 """
1302 # Don't reference tr2 in hook() so we don't hold a reference.
1302 # Don't reference tr2 in hook() so we don't hold a reference.
1303 # This reduces memory consumption when there are multiple
1303 # This reduces memory consumption when there are multiple
1304 # transactions per lock. This can likely go away if issue5045
1304 # transactions per lock. This can likely go away if issue5045
1305 # fixes the function accumulation.
1305 # fixes the function accumulation.
1306 hookargs = tr2.hookargs
1306 hookargs = tr2.hookargs
1307
1307
1308 def hookfunc():
1308 def hookfunc():
1309 repo = reporef()
1309 repo = reporef()
1310 if hook.hashook(repo.ui, 'txnclose-bookmark'):
1310 if hook.hashook(repo.ui, 'txnclose-bookmark'):
1311 bmchanges = sorted(tr.changes['bookmarks'].items())
1311 bmchanges = sorted(tr.changes['bookmarks'].items())
1312 for name, (old, new) in bmchanges:
1312 for name, (old, new) in bmchanges:
1313 args = tr.hookargs.copy()
1313 args = tr.hookargs.copy()
1314 args.update(bookmarks.preparehookargs(name, old, new))
1314 args.update(bookmarks.preparehookargs(name, old, new))
1315 repo.hook('txnclose-bookmark', throw=False,
1315 repo.hook('txnclose-bookmark', throw=False,
1316 txnname=desc, **pycompat.strkwargs(args))
1316 txnname=desc, **pycompat.strkwargs(args))
1317
1317
1318 if hook.hashook(repo.ui, 'txnclose-phase'):
1318 if hook.hashook(repo.ui, 'txnclose-phase'):
1319 cl = repo.unfiltered().changelog
1319 cl = repo.unfiltered().changelog
1320 phasemv = sorted(tr.changes['phases'].items())
1320 phasemv = sorted(tr.changes['phases'].items())
1321 for rev, (old, new) in phasemv:
1321 for rev, (old, new) in phasemv:
1322 args = tr.hookargs.copy()
1322 args = tr.hookargs.copy()
1323 node = hex(cl.node(rev))
1323 node = hex(cl.node(rev))
1324 args.update(phases.preparehookargs(node, old, new))
1324 args.update(phases.preparehookargs(node, old, new))
1325 repo.hook('txnclose-phase', throw=False, txnname=desc,
1325 repo.hook('txnclose-phase', throw=False, txnname=desc,
1326 **pycompat.strkwargs(args))
1326 **pycompat.strkwargs(args))
1327
1327
1328 repo.hook('txnclose', throw=False, txnname=desc,
1328 repo.hook('txnclose', throw=False, txnname=desc,
1329 **pycompat.strkwargs(hookargs))
1329 **pycompat.strkwargs(hookargs))
1330 reporef()._afterlock(hookfunc)
1330 reporef()._afterlock(hookfunc)
1331 tr.addfinalize('txnclose-hook', txnclosehook)
1331 tr.addfinalize('txnclose-hook', txnclosehook)
1332 tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
1332 tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
1333 def txnaborthook(tr2):
1333 def txnaborthook(tr2):
1334 """To be run if transaction is aborted
1334 """To be run if transaction is aborted
1335 """
1335 """
1336 reporef().hook('txnabort', throw=False, txnname=desc,
1336 reporef().hook('txnabort', throw=False, txnname=desc,
1337 **tr2.hookargs)
1337 **tr2.hookargs)
1338 tr.addabort('txnabort-hook', txnaborthook)
1338 tr.addabort('txnabort-hook', txnaborthook)
1339 # avoid eager cache invalidation. in-memory data should be identical
1339 # avoid eager cache invalidation. in-memory data should be identical
1340 # to stored data if transaction has no error.
1340 # to stored data if transaction has no error.
1341 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1341 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1342 self._transref = weakref.ref(tr)
1342 self._transref = weakref.ref(tr)
1343 scmutil.registersummarycallback(self, tr, desc)
1343 scmutil.registersummarycallback(self, tr, desc)
1344 return tr
1344 return tr
1345
1345
1346 def _journalfiles(self):
1346 def _journalfiles(self):
1347 return ((self.svfs, 'journal'),
1347 return ((self.svfs, 'journal'),
1348 (self.vfs, 'journal.dirstate'),
1348 (self.vfs, 'journal.dirstate'),
1349 (self.vfs, 'journal.branch'),
1349 (self.vfs, 'journal.branch'),
1350 (self.vfs, 'journal.desc'),
1350 (self.vfs, 'journal.desc'),
1351 (self.vfs, 'journal.bookmarks'),
1351 (self.vfs, 'journal.bookmarks'),
1352 (self.svfs, 'journal.phaseroots'))
1352 (self.svfs, 'journal.phaseroots'))
1353
1353
1354 def undofiles(self):
1354 def undofiles(self):
1355 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1355 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1356
1356
1357 @unfilteredmethod
1357 @unfilteredmethod
1358 def _writejournal(self, desc):
1358 def _writejournal(self, desc):
1359 self.dirstate.savebackup(None, 'journal.dirstate')
1359 self.dirstate.savebackup(None, 'journal.dirstate')
1360 self.vfs.write("journal.branch",
1360 self.vfs.write("journal.branch",
1361 encoding.fromlocal(self.dirstate.branch()))
1361 encoding.fromlocal(self.dirstate.branch()))
1362 self.vfs.write("journal.desc",
1362 self.vfs.write("journal.desc",
1363 "%d\n%s\n" % (len(self), desc))
1363 "%d\n%s\n" % (len(self), desc))
1364 self.vfs.write("journal.bookmarks",
1364 self.vfs.write("journal.bookmarks",
1365 self.vfs.tryread("bookmarks"))
1365 self.vfs.tryread("bookmarks"))
1366 self.svfs.write("journal.phaseroots",
1366 self.svfs.write("journal.phaseroots",
1367 self.svfs.tryread("phaseroots"))
1367 self.svfs.tryread("phaseroots"))
1368
1368
1369 def recover(self):
1369 def recover(self):
1370 with self.lock():
1370 with self.lock():
1371 if self.svfs.exists("journal"):
1371 if self.svfs.exists("journal"):
1372 self.ui.status(_("rolling back interrupted transaction\n"))
1372 self.ui.status(_("rolling back interrupted transaction\n"))
1373 vfsmap = {'': self.svfs,
1373 vfsmap = {'': self.svfs,
1374 'plain': self.vfs,}
1374 'plain': self.vfs,}
1375 transaction.rollback(self.svfs, vfsmap, "journal",
1375 transaction.rollback(self.svfs, vfsmap, "journal",
1376 self.ui.warn,
1376 self.ui.warn,
1377 checkambigfiles=_cachedfiles)
1377 checkambigfiles=_cachedfiles)
1378 self.invalidate()
1378 self.invalidate()
1379 return True
1379 return True
1380 else:
1380 else:
1381 self.ui.warn(_("no interrupted transaction available\n"))
1381 self.ui.warn(_("no interrupted transaction available\n"))
1382 return False
1382 return False
1383
1383
1384 def rollback(self, dryrun=False, force=False):
1384 def rollback(self, dryrun=False, force=False):
1385 wlock = lock = dsguard = None
1385 wlock = lock = dsguard = None
1386 try:
1386 try:
1387 wlock = self.wlock()
1387 wlock = self.wlock()
1388 lock = self.lock()
1388 lock = self.lock()
1389 if self.svfs.exists("undo"):
1389 if self.svfs.exists("undo"):
1390 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1390 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1391
1391
1392 return self._rollback(dryrun, force, dsguard)
1392 return self._rollback(dryrun, force, dsguard)
1393 else:
1393 else:
1394 self.ui.warn(_("no rollback information available\n"))
1394 self.ui.warn(_("no rollback information available\n"))
1395 return 1
1395 return 1
1396 finally:
1396 finally:
1397 release(dsguard, lock, wlock)
1397 release(dsguard, lock, wlock)
1398
1398
1399 @unfilteredmethod # Until we get smarter cache management
1399 @unfilteredmethod # Until we get smarter cache management
1400 def _rollback(self, dryrun, force, dsguard):
1400 def _rollback(self, dryrun, force, dsguard):
1401 ui = self.ui
1401 ui = self.ui
1402 try:
1402 try:
1403 args = self.vfs.read('undo.desc').splitlines()
1403 args = self.vfs.read('undo.desc').splitlines()
1404 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1404 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1405 if len(args) >= 3:
1405 if len(args) >= 3:
1406 detail = args[2]
1406 detail = args[2]
1407 oldtip = oldlen - 1
1407 oldtip = oldlen - 1
1408
1408
1409 if detail and ui.verbose:
1409 if detail and ui.verbose:
1410 msg = (_('repository tip rolled back to revision %d'
1410 msg = (_('repository tip rolled back to revision %d'
1411 ' (undo %s: %s)\n')
1411 ' (undo %s: %s)\n')
1412 % (oldtip, desc, detail))
1412 % (oldtip, desc, detail))
1413 else:
1413 else:
1414 msg = (_('repository tip rolled back to revision %d'
1414 msg = (_('repository tip rolled back to revision %d'
1415 ' (undo %s)\n')
1415 ' (undo %s)\n')
1416 % (oldtip, desc))
1416 % (oldtip, desc))
1417 except IOError:
1417 except IOError:
1418 msg = _('rolling back unknown transaction\n')
1418 msg = _('rolling back unknown transaction\n')
1419 desc = None
1419 desc = None
1420
1420
1421 if not force and self['.'] != self['tip'] and desc == 'commit':
1421 if not force and self['.'] != self['tip'] and desc == 'commit':
1422 raise error.Abort(
1422 raise error.Abort(
1423 _('rollback of last commit while not checked out '
1423 _('rollback of last commit while not checked out '
1424 'may lose data'), hint=_('use -f to force'))
1424 'may lose data'), hint=_('use -f to force'))
1425
1425
1426 ui.status(msg)
1426 ui.status(msg)
1427 if dryrun:
1427 if dryrun:
1428 return 0
1428 return 0
1429
1429
1430 parents = self.dirstate.parents()
1430 parents = self.dirstate.parents()
1431 self.destroying()
1431 self.destroying()
1432 vfsmap = {'plain': self.vfs, '': self.svfs}
1432 vfsmap = {'plain': self.vfs, '': self.svfs}
1433 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1433 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1434 checkambigfiles=_cachedfiles)
1434 checkambigfiles=_cachedfiles)
1435 if self.vfs.exists('undo.bookmarks'):
1435 if self.vfs.exists('undo.bookmarks'):
1436 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1436 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1437 if self.svfs.exists('undo.phaseroots'):
1437 if self.svfs.exists('undo.phaseroots'):
1438 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1438 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1439 self.invalidate()
1439 self.invalidate()
1440
1440
1441 parentgone = (parents[0] not in self.changelog.nodemap or
1441 parentgone = (parents[0] not in self.changelog.nodemap or
1442 parents[1] not in self.changelog.nodemap)
1442 parents[1] not in self.changelog.nodemap)
1443 if parentgone:
1443 if parentgone:
1444 # prevent dirstateguard from overwriting already restored one
1444 # prevent dirstateguard from overwriting already restored one
1445 dsguard.close()
1445 dsguard.close()
1446
1446
1447 self.dirstate.restorebackup(None, 'undo.dirstate')
1447 self.dirstate.restorebackup(None, 'undo.dirstate')
1448 try:
1448 try:
1449 branch = self.vfs.read('undo.branch')
1449 branch = self.vfs.read('undo.branch')
1450 self.dirstate.setbranch(encoding.tolocal(branch))
1450 self.dirstate.setbranch(encoding.tolocal(branch))
1451 except IOError:
1451 except IOError:
1452 ui.warn(_('named branch could not be reset: '
1452 ui.warn(_('named branch could not be reset: '
1453 'current branch is still \'%s\'\n')
1453 'current branch is still \'%s\'\n')
1454 % self.dirstate.branch())
1454 % self.dirstate.branch())
1455
1455
1456 parents = tuple([p.rev() for p in self[None].parents()])
1456 parents = tuple([p.rev() for p in self[None].parents()])
1457 if len(parents) > 1:
1457 if len(parents) > 1:
1458 ui.status(_('working directory now based on '
1458 ui.status(_('working directory now based on '
1459 'revisions %d and %d\n') % parents)
1459 'revisions %d and %d\n') % parents)
1460 else:
1460 else:
1461 ui.status(_('working directory now based on '
1461 ui.status(_('working directory now based on '
1462 'revision %d\n') % parents)
1462 'revision %d\n') % parents)
1463 mergemod.mergestate.clean(self, self['.'].node())
1463 mergemod.mergestate.clean(self, self['.'].node())
1464
1464
1465 # TODO: if we know which new heads may result from this rollback, pass
1465 # TODO: if we know which new heads may result from this rollback, pass
1466 # them to destroy(), which will prevent the branchhead cache from being
1466 # them to destroy(), which will prevent the branchhead cache from being
1467 # invalidated.
1467 # invalidated.
1468 self.destroyed()
1468 self.destroyed()
1469 return 0
1469 return 0
1470
1470
1471 def _buildcacheupdater(self, newtransaction):
1471 def _buildcacheupdater(self, newtransaction):
1472 """called during transaction to build the callback updating cache
1472 """called during transaction to build the callback updating cache
1473
1473
1474 Lives on the repository to help extension who might want to augment
1474 Lives on the repository to help extension who might want to augment
1475 this logic. For this purpose, the created transaction is passed to the
1475 this logic. For this purpose, the created transaction is passed to the
1476 method.
1476 method.
1477 """
1477 """
1478 # we must avoid cyclic reference between repo and transaction.
1478 # we must avoid cyclic reference between repo and transaction.
1479 reporef = weakref.ref(self)
1479 reporef = weakref.ref(self)
1480 def updater(tr):
1480 def updater(tr):
1481 repo = reporef()
1481 repo = reporef()
1482 repo.updatecaches(tr)
1482 repo.updatecaches(tr)
1483 return updater
1483 return updater
1484
1484
1485 @unfilteredmethod
1485 @unfilteredmethod
1486 def updatecaches(self, tr=None):
1486 def updatecaches(self, tr=None):
1487 """warm appropriate caches
1487 """warm appropriate caches
1488
1488
1489 If this function is called after a transaction closed. The transaction
1489 If this function is called after a transaction closed. The transaction
1490 will be available in the 'tr' argument. This can be used to selectively
1490 will be available in the 'tr' argument. This can be used to selectively
1491 update caches relevant to the changes in that transaction.
1491 update caches relevant to the changes in that transaction.
1492 """
1492 """
1493 if tr is not None and tr.hookargs.get('source') == 'strip':
1493 if tr is not None and tr.hookargs.get('source') == 'strip':
1494 # During strip, many caches are invalid but
1494 # During strip, many caches are invalid but
1495 # later call to `destroyed` will refresh them.
1495 # later call to `destroyed` will refresh them.
1496 return
1496 return
1497
1497
1498 if tr is None or tr.changes['revs']:
1498 if tr is None or tr.changes['revs']:
1499 # updating the unfiltered branchmap should refresh all the others,
1499 # updating the unfiltered branchmap should refresh all the others,
1500 self.ui.debug('updating the branch cache\n')
1500 self.ui.debug('updating the branch cache\n')
1501 branchmap.updatecache(self.filtered('served'))
1501 branchmap.updatecache(self.filtered('served'))
1502
1502
1503 def invalidatecaches(self):
1503 def invalidatecaches(self):
1504
1504
1505 if '_tagscache' in vars(self):
1505 if '_tagscache' in vars(self):
1506 # can't use delattr on proxy
1506 # can't use delattr on proxy
1507 del self.__dict__['_tagscache']
1507 del self.__dict__['_tagscache']
1508
1508
1509 self.unfiltered()._branchcaches.clear()
1509 self.unfiltered()._branchcaches.clear()
1510 self.invalidatevolatilesets()
1510 self.invalidatevolatilesets()
1511 self._sparsesignaturecache.clear()
1511 self._sparsesignaturecache.clear()
1512
1512
    def invalidatevolatilesets(self):
        """Drop caches that depend on volatile (obsolescence) data.

        The filtered-revision caches are derived from obsolescence markers,
        so both must be dropped together.
        """
        self.filteredrevcache.clear()
        obsolete.clearobscaches(self)
1516
1516
1517 def invalidatedirstate(self):
1517 def invalidatedirstate(self):
1518 '''Invalidates the dirstate, causing the next call to dirstate
1518 '''Invalidates the dirstate, causing the next call to dirstate
1519 to check if it was modified since the last time it was read,
1519 to check if it was modified since the last time it was read,
1520 rereading it if it has.
1520 rereading it if it has.
1521
1521
1522 This is different to dirstate.invalidate() that it doesn't always
1522 This is different to dirstate.invalidate() that it doesn't always
1523 rereads the dirstate. Use dirstate.invalidate() if you want to
1523 rereads the dirstate. Use dirstate.invalidate() if you want to
1524 explicitly read the dirstate again (i.e. restoring it to a previous
1524 explicitly read the dirstate again (i.e. restoring it to a previous
1525 known good state).'''
1525 known good state).'''
1526 if hasunfilteredcache(self, 'dirstate'):
1526 if hasunfilteredcache(self, 'dirstate'):
1527 for k in self.dirstate._filecache:
1527 for k in self.dirstate._filecache:
1528 try:
1528 try:
1529 delattr(self.dirstate, k)
1529 delattr(self.dirstate, k)
1530 except AttributeError:
1530 except AttributeError:
1531 pass
1531 pass
1532 delattr(self.unfiltered(), 'dirstate')
1532 delattr(self.unfiltered(), 'dirstate')
1533
1533
    def invalidate(self, clearfilecache=False):
        '''Invalidates both store and non-store parts other than dirstate

        If a transaction is running, invalidation of store is omitted,
        because discarding in-memory changes might cause inconsistency
        (e.g. incomplete fncache causes unintentional failure, but
        redundant one doesn't).

        When 'clearfilecache' is true, the filecache entries themselves are
        dropped as well, not just the cached property values.
        '''
        unfiltered = self.unfiltered() # all file caches are stored unfiltered
        for k in list(self._filecache.keys()):
            # dirstate is invalidated separately in invalidatedirstate()
            if k == 'dirstate':
                continue
            if (k == 'changelog' and
                self.currenttransaction() and
                self.changelog._delayed):
                # The changelog object may store unwritten revisions. We don't
                # want to lose them.
                # TODO: Solve the problem instead of working around it.
                continue

            if clearfilecache:
                del self._filecache[k]
            try:
                delattr(unfiltered, k)
            except AttributeError:
                # the cached property was never accessed on the instance
                pass
        self.invalidatecaches()
        if not self.currenttransaction():
            # TODO: Changing contents of store outside transaction
            # causes inconsistency. We should make in-memory store
            # changes detectable, and abort if changed.
            self.store.invalidatecaches()
1567
1567
1568 def invalidateall(self):
1568 def invalidateall(self):
1569 '''Fully invalidates both store and non-store parts, causing the
1569 '''Fully invalidates both store and non-store parts, causing the
1570 subsequent operation to reread any outside changes.'''
1570 subsequent operation to reread any outside changes.'''
1571 # extension should hook this to invalidate its caches
1571 # extension should hook this to invalidate its caches
1572 self.invalidate()
1572 self.invalidate()
1573 self.invalidatedirstate()
1573 self.invalidatedirstate()
1574
1574
    @unfilteredmethod
    def _refreshfilecachestats(self, tr):
        """Reload stats of cached files so that they are flagged as valid"""
        for k, ce in self._filecache.items():
            # dirstate is refreshed separately (on wlock release); entries
            # that were never materialized on the instance have no stat
            # worth refreshing
            if k == 'dirstate' or k not in self.__dict__:
                continue
            ce.refresh()
1582
1582
    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
              inheritchecker=None, parentenvvar=None):
        """Acquire the lock file 'lockname' in 'vfs' and return the lock.

        If the lock is already held by another process and 'wait' is false,
        error.LockHeld propagates to the caller; otherwise a warning is
        printed and the acquisition is retried with the 'ui.timeout'
        configuration value as the timeout.
        """
        parentlock = None
        # the contents of parentenvvar are used by the underlying lock to
        # determine whether it can be inherited
        if parentenvvar is not None:
            parentlock = encoding.environ.get(parentenvvar)
        try:
            # first attempt is non-blocking (timeout of 0)
            l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
                             acquirefn=acquirefn, desc=desc,
                             inheritchecker=inheritchecker,
                             parentlock=parentlock)
        except error.LockHeld as inst:
            if not wait:
                raise
            # show more details for new-style locks
            if ':' in inst.locker:
                host, pid = inst.locker.split(":", 1)
                self.ui.warn(
                    _("waiting for lock on %s held by process %r "
                      "on host %r\n") % (desc, pid, host))
            else:
                self.ui.warn(_("waiting for lock on %s held by %r\n") %
                             (desc, inst.locker))
            # default to 600 seconds timeout
            l = lockmod.lock(vfs, lockname,
                             int(self.ui.config("ui", "timeout")),
                             releasefn=releasefn, acquirefn=acquirefn,
                             desc=desc)
            self.ui.warn(_("got lock after %s seconds\n") % l.delay)
        return l
1614
1614
1615 def _afterlock(self, callback):
1615 def _afterlock(self, callback):
1616 """add a callback to be run when the repository is fully unlocked
1616 """add a callback to be run when the repository is fully unlocked
1617
1617
1618 The callback will be executed when the outermost lock is released
1618 The callback will be executed when the outermost lock is released
1619 (with wlock being higher level than 'lock')."""
1619 (with wlock being higher level than 'lock')."""
1620 for ref in (self._wlockref, self._lockref):
1620 for ref in (self._wlockref, self._lockref):
1621 l = ref and ref()
1621 l = ref and ref()
1622 if l and l.held:
1622 if l and l.held:
1623 l.postrelease.append(callback)
1623 l.postrelease.append(callback)
1624 break
1624 break
1625 else: # no lock have been found.
1625 else: # no lock have been found.
1626 callback()
1626 callback()
1627
1627
1628 def lock(self, wait=True):
1628 def lock(self, wait=True):
1629 '''Lock the repository store (.hg/store) and return a weak reference
1629 '''Lock the repository store (.hg/store) and return a weak reference
1630 to the lock. Use this before modifying the store (e.g. committing or
1630 to the lock. Use this before modifying the store (e.g. committing or
1631 stripping). If you are opening a transaction, get a lock as well.)
1631 stripping). If you are opening a transaction, get a lock as well.)
1632
1632
1633 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1633 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1634 'wlock' first to avoid a dead-lock hazard.'''
1634 'wlock' first to avoid a dead-lock hazard.'''
1635 l = self._currentlock(self._lockref)
1635 l = self._currentlock(self._lockref)
1636 if l is not None:
1636 if l is not None:
1637 l.lock()
1637 l.lock()
1638 return l
1638 return l
1639
1639
1640 l = self._lock(self.svfs, "lock", wait, None,
1640 l = self._lock(self.svfs, "lock", wait, None,
1641 self.invalidate, _('repository %s') % self.origroot)
1641 self.invalidate, _('repository %s') % self.origroot)
1642 self._lockref = weakref.ref(l)
1642 self._lockref = weakref.ref(l)
1643 return l
1643 return l
1644
1644
1645 def _wlockchecktransaction(self):
1645 def _wlockchecktransaction(self):
1646 if self.currenttransaction() is not None:
1646 if self.currenttransaction() is not None:
1647 raise error.LockInheritanceContractViolation(
1647 raise error.LockInheritanceContractViolation(
1648 'wlock cannot be inherited in the middle of a transaction')
1648 'wlock cannot be inherited in the middle of a transaction')
1649
1649
    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.

        Use this before modifying files in .hg.

        If both 'lock' and 'wlock' must be acquired, ensure you always acquires
        'wlock' first to avoid a dead-lock hazard.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            # re-entrant acquisition of an already-held wlock
            l.lock()
            return l

        # We do not need to check for non-waiting lock acquisition. Such
        # acquisition would not cause dead-lock as they would just fail.
        if wait and (self.ui.configbool('devel', 'all-warnings')
                     or self.ui.configbool('devel', 'check-locks')):
            if self._currentlock(self._lockref) is not None:
                self.ui.develwarn('"wlock" acquired after "lock"')

        def unlock():
            # release function: write out (or discard) pending dirstate
            # changes, then refresh the cached stat so the dirstate
            # filecache entry stays valid
            if self.dirstate.pendingparentchange():
                self.dirstate.invalidate()
            else:
                self.dirstate.write(None)

            self._filecache['dirstate'].refresh()

        l = self._lock(self.vfs, "wlock", wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot,
                       inheritchecker=self._wlockchecktransaction,
                       parentenvvar='HG_WLOCK_LOCKER')
        self._wlockref = weakref.ref(l)
        return l
1685
1685
1686 def _currentlock(self, lockref):
1686 def _currentlock(self, lockref):
1687 """Returns the lock if it's held, or None if it's not."""
1687 """Returns the lock if it's held, or None if it's not."""
1688 if lockref is None:
1688 if lockref is None:
1689 return None
1689 return None
1690 l = lockref()
1690 l = lockref()
1691 if l is None or not l.held:
1691 if l is None or not l.held:
1692 return None
1692 return None
1693 return l
1693 return l
1694
1694
1695 def currentwlock(self):
1695 def currentwlock(self):
1696 """Returns the wlock if it's held, or None if it's not."""
1696 """Returns the wlock if it's held, or None if it's not."""
1697 return self._currentlock(self._wlockref)
1697 return self._currentlock(self._wlockref)
1698
1698
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the filelog node the changelog should reference for this
        file (the reused parent node when nothing needs to be written).
        File names whose content or flags actually changed are appended to
        'changelist'.
        """

        fname = fctx.path()
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = manifest2.get(fname, nullid)
        if isinstance(fctx, context.filectx):
            # the context already carries a filelog node; when it matches
            # one of the parents we can reuse it instead of adding a new
            # filelog revision
            node = fctx.filenode()
            if node in [fparent1, fparent2]:
                self.ui.debug('reusing %s filelog entry\n' % fname)
                if manifest1.flags(fname) != fctx.flags():
                    changelist.append(fname)
                return node

        flog = self.file(fname)
        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                 should record that bar descends from
            #                 bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # Here, we used to search backwards through history to try to find
            # where the file copy came from if the source of a copy was not in
            # the parent directory. However, this doesn't actually make sense to
            # do (what does a copy from something not in your working copy even
            # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
            # the user that copy information was dropped, so if they didn't
            # expect this outcome it can be fixed, but this is the correct
            # behavior in this circumstance.

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent1 == nullid:
            # normalize: keep the single real parent in the first slot
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
            if fparent1 in fparentancestors:
                fparent1, fparent2 = fparent2, nullid
            elif fparent2 in fparentancestors:
                fparent2 = nullid

        # is the file changed?
        text = fctx.data()
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
        # are just the flags changed during merge?
        elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1
1786
1786
1787 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
1787 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
1788 """check for commit arguments that aren't committable"""
1788 """check for commit arguments that aren't committable"""
1789 if match.isexact() or match.prefix():
1789 if match.isexact() or match.prefix():
1790 matched = set(status.modified + status.added + status.removed)
1790 matched = set(status.modified + status.added + status.removed)
1791
1791
1792 for f in match.files():
1792 for f in match.files():
1793 f = self.dirstate.normalize(f)
1793 f = self.dirstate.normalize(f)
1794 if f == '.' or f in matched or f in wctx.substate:
1794 if f == '.' or f in matched or f in wctx.substate:
1795 continue
1795 continue
1796 if f in status.deleted:
1796 if f in status.deleted:
1797 fail(f, _('file not found!'))
1797 fail(f, _('file not found!'))
1798 if f in vdirs: # visited directory
1798 if f in vdirs: # visited directory
1799 d = f + '/'
1799 d = f + '/'
1800 for mf in matched:
1800 for mf in matched:
1801 if mf.startswith(d):
1801 if mf.startswith(d):
1802 break
1802 break
1803 else:
1803 else:
1804 fail(f, _("no match under directory!"))
1804 fail(f, _("no match under directory!"))
1805 elif f not in self.dirstate:
1805 elif f not in self.dirstate:
1806 fail(f, _("file not tracked!"))
1806 fail(f, _("file not tracked!"))
1807
1807
1808 @unfilteredmethod
1808 @unfilteredmethod
1809 def commit(self, text="", user=None, date=None, match=None, force=False,
1809 def commit(self, text="", user=None, date=None, match=None, force=False,
1810 editor=False, extra=None):
1810 editor=False, extra=None):
1811 """Add a new revision to current repository.
1811 """Add a new revision to current repository.
1812
1812
1813 Revision information is gathered from the working directory,
1813 Revision information is gathered from the working directory,
1814 match can be used to filter the committed files. If editor is
1814 match can be used to filter the committed files. If editor is
1815 supplied, it is called to get a commit message.
1815 supplied, it is called to get a commit message.
1816 """
1816 """
1817 if extra is None:
1817 if extra is None:
1818 extra = {}
1818 extra = {}
1819
1819
1820 def fail(f, msg):
1820 def fail(f, msg):
1821 raise error.Abort('%s: %s' % (f, msg))
1821 raise error.Abort('%s: %s' % (f, msg))
1822
1822
1823 if not match:
1823 if not match:
1824 match = matchmod.always(self.root, '')
1824 match = matchmod.always(self.root, '')
1825
1825
1826 if not force:
1826 if not force:
1827 vdirs = []
1827 vdirs = []
1828 match.explicitdir = vdirs.append
1828 match.explicitdir = vdirs.append
1829 match.bad = fail
1829 match.bad = fail
1830
1830
1831 wlock = lock = tr = None
1831 wlock = lock = tr = None
1832 try:
1832 try:
1833 wlock = self.wlock()
1833 wlock = self.wlock()
1834 lock = self.lock() # for recent changelog (see issue4368)
1834 lock = self.lock() # for recent changelog (see issue4368)
1835
1835
1836 wctx = self[None]
1836 wctx = self[None]
1837 merge = len(wctx.parents()) > 1
1837 merge = len(wctx.parents()) > 1
1838
1838
1839 if not force and merge and not match.always():
1839 if not force and merge and not match.always():
1840 raise error.Abort(_('cannot partially commit a merge '
1840 raise error.Abort(_('cannot partially commit a merge '
1841 '(do not specify files or patterns)'))
1841 '(do not specify files or patterns)'))
1842
1842
1843 status = self.status(match=match, clean=force)
1843 status = self.status(match=match, clean=force)
1844 if force:
1844 if force:
1845 status.modified.extend(status.clean) # mq may commit clean files
1845 status.modified.extend(status.clean) # mq may commit clean files
1846
1846
1847 # check subrepos
1847 # check subrepos
1848 subs = []
1848 subs, commitsubs, newstate = subrepo.precommit(
1849 commitsubs = set()
1849 self.ui, wctx, status, match, force=force)
1850 newstate = wctx.substate.copy()
1851 # only manage subrepos and .hgsubstate if .hgsub is present
1852 if '.hgsub' in wctx:
1853 # we'll decide whether to track this ourselves, thanks
1854 for c in status.modified, status.added, status.removed:
1855 if '.hgsubstate' in c:
1856 c.remove('.hgsubstate')
1857
1858 # compare current state to last committed state
1859 # build new substate based on last committed state
1860 oldstate = wctx.p1().substate
1861 for s in sorted(newstate.keys()):
1862 if not match(s):
1863 # ignore working copy, use old state if present
1864 if s in oldstate:
1865 newstate[s] = oldstate[s]
1866 continue
1867 if not force:
1868 raise error.Abort(
1869 _("commit with new subrepo %s excluded") % s)
1870 dirtyreason = wctx.sub(s).dirtyreason(True)
1871 if dirtyreason:
1872 if not self.ui.configbool('ui', 'commitsubrepos'):
1873 raise error.Abort(dirtyreason,
1874 hint=_("use --subrepos for recursive commit"))
1875 subs.append(s)
1876 commitsubs.add(s)
1877 else:
1878 bs = wctx.sub(s).basestate()
1879 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1880 if oldstate.get(s, (None, None, None))[1] != bs:
1881 subs.append(s)
1882
1883 # check for removed subrepos
1884 for p in wctx.parents():
1885 r = [s for s in p.substate if s not in newstate]
1886 subs += [s for s in r if match(s)]
1887 if subs:
1888 if (not match('.hgsub') and
1889 '.hgsub' in (wctx.modified() + wctx.added())):
1890 raise error.Abort(
1891 _("can't commit subrepos without .hgsub"))
1892 status.modified.insert(0, '.hgsubstate')
1893
1894 elif '.hgsub' in status.removed:
1895 # clean up .hgsubstate when .hgsub is removed
1896 if ('.hgsubstate' in wctx and
1897 '.hgsubstate' not in (status.modified + status.added +
1898 status.removed)):
1899 status.removed.insert(0, '.hgsubstate')
1900
1850
1901 # make sure all explicit patterns are matched
1851 # make sure all explicit patterns are matched
1902 if not force:
1852 if not force:
1903 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
1853 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
1904
1854
1905 cctx = context.workingcommitctx(self, status,
1855 cctx = context.workingcommitctx(self, status,
1906 text, user, date, extra)
1856 text, user, date, extra)
1907
1857
1908 # internal config: ui.allowemptycommit
1858 # internal config: ui.allowemptycommit
1909 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1859 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1910 or extra.get('close') or merge or cctx.files()
1860 or extra.get('close') or merge or cctx.files()
1911 or self.ui.configbool('ui', 'allowemptycommit'))
1861 or self.ui.configbool('ui', 'allowemptycommit'))
1912 if not allowemptycommit:
1862 if not allowemptycommit:
1913 return None
1863 return None
1914
1864
1915 if merge and cctx.deleted():
1865 if merge and cctx.deleted():
1916 raise error.Abort(_("cannot commit merge with missing files"))
1866 raise error.Abort(_("cannot commit merge with missing files"))
1917
1867
1918 ms = mergemod.mergestate.read(self)
1868 ms = mergemod.mergestate.read(self)
1919 mergeutil.checkunresolved(ms)
1869 mergeutil.checkunresolved(ms)
1920
1870
1921 if editor:
1871 if editor:
1922 cctx._text = editor(self, cctx, subs)
1872 cctx._text = editor(self, cctx, subs)
1923 edited = (text != cctx._text)
1873 edited = (text != cctx._text)
1924
1874
1925 # Save commit message in case this transaction gets rolled back
1875 # Save commit message in case this transaction gets rolled back
1926 # (e.g. by a pretxncommit hook). Leave the content alone on
1876 # (e.g. by a pretxncommit hook). Leave the content alone on
1927 # the assumption that the user will use the same editor again.
1877 # the assumption that the user will use the same editor again.
1928 msgfn = self.savecommitmessage(cctx._text)
1878 msgfn = self.savecommitmessage(cctx._text)
1929
1879
1930 # commit subs and write new state
1880 # commit subs and write new state
1931 if subs:
1881 if subs:
1932 for s in sorted(commitsubs):
1882 for s in sorted(commitsubs):
1933 sub = wctx.sub(s)
1883 sub = wctx.sub(s)
1934 self.ui.status(_('committing subrepository %s\n') %
1884 self.ui.status(_('committing subrepository %s\n') %
1935 subrepo.subrelpath(sub))
1885 subrepo.subrelpath(sub))
1936 sr = sub.commit(cctx._text, user, date)
1886 sr = sub.commit(cctx._text, user, date)
1937 newstate[s] = (newstate[s][0], sr)
1887 newstate[s] = (newstate[s][0], sr)
1938 subrepo.writestate(self, newstate)
1888 subrepo.writestate(self, newstate)
1939
1889
1940 p1, p2 = self.dirstate.parents()
1890 p1, p2 = self.dirstate.parents()
1941 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1891 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1942 try:
1892 try:
1943 self.hook("precommit", throw=True, parent1=hookp1,
1893 self.hook("precommit", throw=True, parent1=hookp1,
1944 parent2=hookp2)
1894 parent2=hookp2)
1945 tr = self.transaction('commit')
1895 tr = self.transaction('commit')
1946 ret = self.commitctx(cctx, True)
1896 ret = self.commitctx(cctx, True)
1947 except: # re-raises
1897 except: # re-raises
1948 if edited:
1898 if edited:
1949 self.ui.write(
1899 self.ui.write(
1950 _('note: commit message saved in %s\n') % msgfn)
1900 _('note: commit message saved in %s\n') % msgfn)
1951 raise
1901 raise
1952 # update bookmarks, dirstate and mergestate
1902 # update bookmarks, dirstate and mergestate
1953 bookmarks.update(self, [p1, p2], ret)
1903 bookmarks.update(self, [p1, p2], ret)
1954 cctx.markcommitted(ret)
1904 cctx.markcommitted(ret)
1955 ms.reset()
1905 ms.reset()
1956 tr.close()
1906 tr.close()
1957
1907
1958 finally:
1908 finally:
1959 lockmod.release(tr, lock, wlock)
1909 lockmod.release(tr, lock, wlock)
1960
1910
1961 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1911 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1962 # hack for command that use a temporary commit (eg: histedit)
1912 # hack for command that use a temporary commit (eg: histedit)
1963 # temporary commit got stripped before hook release
1913 # temporary commit got stripped before hook release
1964 if self.changelog.hasnode(ret):
1914 if self.changelog.hasnode(ret):
1965 self.hook("commit", node=node, parent1=parent1,
1915 self.hook("commit", node=node, parent1=parent1,
1966 parent2=parent2)
1916 parent2=parent2)
1967 self._afterlock(commithook)
1917 self._afterlock(commithook)
1968 return ret
1918 return ret
1969
1919
1970 @unfilteredmethod
1920 @unfilteredmethod
1971 def commitctx(self, ctx, error=False):
1921 def commitctx(self, ctx, error=False):
1972 """Add a new revision to current repository.
1922 """Add a new revision to current repository.
1973 Revision information is passed via the context argument.
1923 Revision information is passed via the context argument.
1974 """
1924 """
1975
1925
1976 tr = None
1926 tr = None
1977 p1, p2 = ctx.p1(), ctx.p2()
1927 p1, p2 = ctx.p1(), ctx.p2()
1978 user = ctx.user()
1928 user = ctx.user()
1979
1929
1980 lock = self.lock()
1930 lock = self.lock()
1981 try:
1931 try:
1982 tr = self.transaction("commit")
1932 tr = self.transaction("commit")
1983 trp = weakref.proxy(tr)
1933 trp = weakref.proxy(tr)
1984
1934
1985 if ctx.manifestnode():
1935 if ctx.manifestnode():
1986 # reuse an existing manifest revision
1936 # reuse an existing manifest revision
1987 mn = ctx.manifestnode()
1937 mn = ctx.manifestnode()
1988 files = ctx.files()
1938 files = ctx.files()
1989 elif ctx.files():
1939 elif ctx.files():
1990 m1ctx = p1.manifestctx()
1940 m1ctx = p1.manifestctx()
1991 m2ctx = p2.manifestctx()
1941 m2ctx = p2.manifestctx()
1992 mctx = m1ctx.copy()
1942 mctx = m1ctx.copy()
1993
1943
1994 m = mctx.read()
1944 m = mctx.read()
1995 m1 = m1ctx.read()
1945 m1 = m1ctx.read()
1996 m2 = m2ctx.read()
1946 m2 = m2ctx.read()
1997
1947
1998 # check in files
1948 # check in files
1999 added = []
1949 added = []
2000 changed = []
1950 changed = []
2001 removed = list(ctx.removed())
1951 removed = list(ctx.removed())
2002 linkrev = len(self)
1952 linkrev = len(self)
2003 self.ui.note(_("committing files:\n"))
1953 self.ui.note(_("committing files:\n"))
2004 for f in sorted(ctx.modified() + ctx.added()):
1954 for f in sorted(ctx.modified() + ctx.added()):
2005 self.ui.note(f + "\n")
1955 self.ui.note(f + "\n")
2006 try:
1956 try:
2007 fctx = ctx[f]
1957 fctx = ctx[f]
2008 if fctx is None:
1958 if fctx is None:
2009 removed.append(f)
1959 removed.append(f)
2010 else:
1960 else:
2011 added.append(f)
1961 added.append(f)
2012 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1962 m[f] = self._filecommit(fctx, m1, m2, linkrev,
2013 trp, changed)
1963 trp, changed)
2014 m.setflag(f, fctx.flags())
1964 m.setflag(f, fctx.flags())
2015 except OSError as inst:
1965 except OSError as inst:
2016 self.ui.warn(_("trouble committing %s!\n") % f)
1966 self.ui.warn(_("trouble committing %s!\n") % f)
2017 raise
1967 raise
2018 except IOError as inst:
1968 except IOError as inst:
2019 errcode = getattr(inst, 'errno', errno.ENOENT)
1969 errcode = getattr(inst, 'errno', errno.ENOENT)
2020 if error or errcode and errcode != errno.ENOENT:
1970 if error or errcode and errcode != errno.ENOENT:
2021 self.ui.warn(_("trouble committing %s!\n") % f)
1971 self.ui.warn(_("trouble committing %s!\n") % f)
2022 raise
1972 raise
2023
1973
2024 # update manifest
1974 # update manifest
2025 self.ui.note(_("committing manifest\n"))
1975 self.ui.note(_("committing manifest\n"))
2026 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1976 removed = [f for f in sorted(removed) if f in m1 or f in m2]
2027 drop = [f for f in removed if f in m]
1977 drop = [f for f in removed if f in m]
2028 for f in drop:
1978 for f in drop:
2029 del m[f]
1979 del m[f]
2030 mn = mctx.write(trp, linkrev,
1980 mn = mctx.write(trp, linkrev,
2031 p1.manifestnode(), p2.manifestnode(),
1981 p1.manifestnode(), p2.manifestnode(),
2032 added, drop)
1982 added, drop)
2033 files = changed + removed
1983 files = changed + removed
2034 else:
1984 else:
2035 mn = p1.manifestnode()
1985 mn = p1.manifestnode()
2036 files = []
1986 files = []
2037
1987
2038 # update changelog
1988 # update changelog
2039 self.ui.note(_("committing changelog\n"))
1989 self.ui.note(_("committing changelog\n"))
2040 self.changelog.delayupdate(tr)
1990 self.changelog.delayupdate(tr)
2041 n = self.changelog.add(mn, files, ctx.description(),
1991 n = self.changelog.add(mn, files, ctx.description(),
2042 trp, p1.node(), p2.node(),
1992 trp, p1.node(), p2.node(),
2043 user, ctx.date(), ctx.extra().copy())
1993 user, ctx.date(), ctx.extra().copy())
2044 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1994 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
2045 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1995 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
2046 parent2=xp2)
1996 parent2=xp2)
2047 # set the new commit is proper phase
1997 # set the new commit is proper phase
2048 targetphase = subrepo.newcommitphase(self.ui, ctx)
1998 targetphase = subrepo.newcommitphase(self.ui, ctx)
2049 if targetphase:
1999 if targetphase:
2050 # retract boundary do not alter parent changeset.
2000 # retract boundary do not alter parent changeset.
2051 # if a parent have higher the resulting phase will
2001 # if a parent have higher the resulting phase will
2052 # be compliant anyway
2002 # be compliant anyway
2053 #
2003 #
2054 # if minimal phase was 0 we don't need to retract anything
2004 # if minimal phase was 0 we don't need to retract anything
2055 phases.registernew(self, tr, targetphase, [n])
2005 phases.registernew(self, tr, targetphase, [n])
2056 tr.close()
2006 tr.close()
2057 return n
2007 return n
2058 finally:
2008 finally:
2059 if tr:
2009 if tr:
2060 tr.release()
2010 tr.release()
2061 lock.release()
2011 lock.release()
2062
2012
2063 @unfilteredmethod
2013 @unfilteredmethod
2064 def destroying(self):
2014 def destroying(self):
2065 '''Inform the repository that nodes are about to be destroyed.
2015 '''Inform the repository that nodes are about to be destroyed.
2066 Intended for use by strip and rollback, so there's a common
2016 Intended for use by strip and rollback, so there's a common
2067 place for anything that has to be done before destroying history.
2017 place for anything that has to be done before destroying history.
2068
2018
2069 This is mostly useful for saving state that is in memory and waiting
2019 This is mostly useful for saving state that is in memory and waiting
2070 to be flushed when the current lock is released. Because a call to
2020 to be flushed when the current lock is released. Because a call to
2071 destroyed is imminent, the repo will be invalidated causing those
2021 destroyed is imminent, the repo will be invalidated causing those
2072 changes to stay in memory (waiting for the next unlock), or vanish
2022 changes to stay in memory (waiting for the next unlock), or vanish
2073 completely.
2023 completely.
2074 '''
2024 '''
2075 # When using the same lock to commit and strip, the phasecache is left
2025 # When using the same lock to commit and strip, the phasecache is left
2076 # dirty after committing. Then when we strip, the repo is invalidated,
2026 # dirty after committing. Then when we strip, the repo is invalidated,
2077 # causing those changes to disappear.
2027 # causing those changes to disappear.
2078 if '_phasecache' in vars(self):
2028 if '_phasecache' in vars(self):
2079 self._phasecache.write()
2029 self._phasecache.write()
2080
2030
2081 @unfilteredmethod
2031 @unfilteredmethod
2082 def destroyed(self):
2032 def destroyed(self):
2083 '''Inform the repository that nodes have been destroyed.
2033 '''Inform the repository that nodes have been destroyed.
2084 Intended for use by strip and rollback, so there's a common
2034 Intended for use by strip and rollback, so there's a common
2085 place for anything that has to be done after destroying history.
2035 place for anything that has to be done after destroying history.
2086 '''
2036 '''
2087 # When one tries to:
2037 # When one tries to:
2088 # 1) destroy nodes thus calling this method (e.g. strip)
2038 # 1) destroy nodes thus calling this method (e.g. strip)
2089 # 2) use phasecache somewhere (e.g. commit)
2039 # 2) use phasecache somewhere (e.g. commit)
2090 #
2040 #
2091 # then 2) will fail because the phasecache contains nodes that were
2041 # then 2) will fail because the phasecache contains nodes that were
2092 # removed. We can either remove phasecache from the filecache,
2042 # removed. We can either remove phasecache from the filecache,
2093 # causing it to reload next time it is accessed, or simply filter
2043 # causing it to reload next time it is accessed, or simply filter
2094 # the removed nodes now and write the updated cache.
2044 # the removed nodes now and write the updated cache.
2095 self._phasecache.filterunknown(self)
2045 self._phasecache.filterunknown(self)
2096 self._phasecache.write()
2046 self._phasecache.write()
2097
2047
2098 # refresh all repository caches
2048 # refresh all repository caches
2099 self.updatecaches()
2049 self.updatecaches()
2100
2050
2101 # Ensure the persistent tag cache is updated. Doing it now
2051 # Ensure the persistent tag cache is updated. Doing it now
2102 # means that the tag cache only has to worry about destroyed
2052 # means that the tag cache only has to worry about destroyed
2103 # heads immediately after a strip/rollback. That in turn
2053 # heads immediately after a strip/rollback. That in turn
2104 # guarantees that "cachetip == currenttip" (comparing both rev
2054 # guarantees that "cachetip == currenttip" (comparing both rev
2105 # and node) always means no nodes have been added or destroyed.
2055 # and node) always means no nodes have been added or destroyed.
2106
2056
2107 # XXX this is suboptimal when qrefresh'ing: we strip the current
2057 # XXX this is suboptimal when qrefresh'ing: we strip the current
2108 # head, refresh the tag cache, then immediately add a new head.
2058 # head, refresh the tag cache, then immediately add a new head.
2109 # But I think doing it this way is necessary for the "instant
2059 # But I think doing it this way is necessary for the "instant
2110 # tag cache retrieval" case to work.
2060 # tag cache retrieval" case to work.
2111 self.invalidate()
2061 self.invalidate()
2112
2062
2113 def walk(self, match, node=None):
2063 def walk(self, match, node=None):
2114 '''
2064 '''
2115 walk recursively through the directory tree or a given
2065 walk recursively through the directory tree or a given
2116 changeset, finding all files matched by the match
2066 changeset, finding all files matched by the match
2117 function
2067 function
2118 '''
2068 '''
2119 self.ui.deprecwarn('use repo[node].walk instead of repo.walk', '4.3')
2069 self.ui.deprecwarn('use repo[node].walk instead of repo.walk', '4.3')
2120 return self[node].walk(match)
2070 return self[node].walk(match)
2121
2071
2122 def status(self, node1='.', node2=None, match=None,
2072 def status(self, node1='.', node2=None, match=None,
2123 ignored=False, clean=False, unknown=False,
2073 ignored=False, clean=False, unknown=False,
2124 listsubrepos=False):
2074 listsubrepos=False):
2125 '''a convenience method that calls node1.status(node2)'''
2075 '''a convenience method that calls node1.status(node2)'''
2126 return self[node1].status(node2, match, ignored, clean, unknown,
2076 return self[node1].status(node2, match, ignored, clean, unknown,
2127 listsubrepos)
2077 listsubrepos)
2128
2078
2129 def addpostdsstatus(self, ps):
2079 def addpostdsstatus(self, ps):
2130 """Add a callback to run within the wlock, at the point at which status
2080 """Add a callback to run within the wlock, at the point at which status
2131 fixups happen.
2081 fixups happen.
2132
2082
2133 On status completion, callback(wctx, status) will be called with the
2083 On status completion, callback(wctx, status) will be called with the
2134 wlock held, unless the dirstate has changed from underneath or the wlock
2084 wlock held, unless the dirstate has changed from underneath or the wlock
2135 couldn't be grabbed.
2085 couldn't be grabbed.
2136
2086
2137 Callbacks should not capture and use a cached copy of the dirstate --
2087 Callbacks should not capture and use a cached copy of the dirstate --
2138 it might change in the meanwhile. Instead, they should access the
2088 it might change in the meanwhile. Instead, they should access the
2139 dirstate via wctx.repo().dirstate.
2089 dirstate via wctx.repo().dirstate.
2140
2090
2141 This list is emptied out after each status run -- extensions should
2091 This list is emptied out after each status run -- extensions should
2142 make sure it adds to this list each time dirstate.status is called.
2092 make sure it adds to this list each time dirstate.status is called.
2143 Extensions should also make sure they don't call this for statuses
2093 Extensions should also make sure they don't call this for statuses
2144 that don't involve the dirstate.
2094 that don't involve the dirstate.
2145 """
2095 """
2146
2096
2147 # The list is located here for uniqueness reasons -- it is actually
2097 # The list is located here for uniqueness reasons -- it is actually
2148 # managed by the workingctx, but that isn't unique per-repo.
2098 # managed by the workingctx, but that isn't unique per-repo.
2149 self._postdsstatus.append(ps)
2099 self._postdsstatus.append(ps)
2150
2100
2151 def postdsstatus(self):
2101 def postdsstatus(self):
2152 """Used by workingctx to get the list of post-dirstate-status hooks."""
2102 """Used by workingctx to get the list of post-dirstate-status hooks."""
2153 return self._postdsstatus
2103 return self._postdsstatus
2154
2104
2155 def clearpostdsstatus(self):
2105 def clearpostdsstatus(self):
2156 """Used by workingctx to clear post-dirstate-status hooks."""
2106 """Used by workingctx to clear post-dirstate-status hooks."""
2157 del self._postdsstatus[:]
2107 del self._postdsstatus[:]
2158
2108
2159 def heads(self, start=None):
2109 def heads(self, start=None):
2160 if start is None:
2110 if start is None:
2161 cl = self.changelog
2111 cl = self.changelog
2162 headrevs = reversed(cl.headrevs())
2112 headrevs = reversed(cl.headrevs())
2163 return [cl.node(rev) for rev in headrevs]
2113 return [cl.node(rev) for rev in headrevs]
2164
2114
2165 heads = self.changelog.heads(start)
2115 heads = self.changelog.heads(start)
2166 # sort the output in rev descending order
2116 # sort the output in rev descending order
2167 return sorted(heads, key=self.changelog.rev, reverse=True)
2117 return sorted(heads, key=self.changelog.rev, reverse=True)
2168
2118
2169 def branchheads(self, branch=None, start=None, closed=False):
2119 def branchheads(self, branch=None, start=None, closed=False):
2170 '''return a (possibly filtered) list of heads for the given branch
2120 '''return a (possibly filtered) list of heads for the given branch
2171
2121
2172 Heads are returned in topological order, from newest to oldest.
2122 Heads are returned in topological order, from newest to oldest.
2173 If branch is None, use the dirstate branch.
2123 If branch is None, use the dirstate branch.
2174 If start is not None, return only heads reachable from start.
2124 If start is not None, return only heads reachable from start.
2175 If closed is True, return heads that are marked as closed as well.
2125 If closed is True, return heads that are marked as closed as well.
2176 '''
2126 '''
2177 if branch is None:
2127 if branch is None:
2178 branch = self[None].branch()
2128 branch = self[None].branch()
2179 branches = self.branchmap()
2129 branches = self.branchmap()
2180 if branch not in branches:
2130 if branch not in branches:
2181 return []
2131 return []
2182 # the cache returns heads ordered lowest to highest
2132 # the cache returns heads ordered lowest to highest
2183 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2133 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2184 if start is not None:
2134 if start is not None:
2185 # filter out the heads that cannot be reached from startrev
2135 # filter out the heads that cannot be reached from startrev
2186 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2136 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2187 bheads = [h for h in bheads if h in fbheads]
2137 bheads = [h for h in bheads if h in fbheads]
2188 return bheads
2138 return bheads
2189
2139
2190 def branches(self, nodes):
2140 def branches(self, nodes):
2191 if not nodes:
2141 if not nodes:
2192 nodes = [self.changelog.tip()]
2142 nodes = [self.changelog.tip()]
2193 b = []
2143 b = []
2194 for n in nodes:
2144 for n in nodes:
2195 t = n
2145 t = n
2196 while True:
2146 while True:
2197 p = self.changelog.parents(n)
2147 p = self.changelog.parents(n)
2198 if p[1] != nullid or p[0] == nullid:
2148 if p[1] != nullid or p[0] == nullid:
2199 b.append((t, n, p[0], p[1]))
2149 b.append((t, n, p[0], p[1]))
2200 break
2150 break
2201 n = p[0]
2151 n = p[0]
2202 return b
2152 return b
2203
2153
2204 def between(self, pairs):
2154 def between(self, pairs):
2205 r = []
2155 r = []
2206
2156
2207 for top, bottom in pairs:
2157 for top, bottom in pairs:
2208 n, l, i = top, [], 0
2158 n, l, i = top, [], 0
2209 f = 1
2159 f = 1
2210
2160
2211 while n != bottom and n != nullid:
2161 while n != bottom and n != nullid:
2212 p = self.changelog.parents(n)[0]
2162 p = self.changelog.parents(n)[0]
2213 if i == f:
2163 if i == f:
2214 l.append(n)
2164 l.append(n)
2215 f = f * 2
2165 f = f * 2
2216 n = p
2166 n = p
2217 i += 1
2167 i += 1
2218
2168
2219 r.append(l)
2169 r.append(l)
2220
2170
2221 return r
2171 return r
2222
2172
2223 def checkpush(self, pushop):
2173 def checkpush(self, pushop):
2224 """Extensions can override this function if additional checks have
2174 """Extensions can override this function if additional checks have
2225 to be performed before pushing, or call it if they override push
2175 to be performed before pushing, or call it if they override push
2226 command.
2176 command.
2227 """
2177 """
2228
2178
2229 @unfilteredpropertycache
2179 @unfilteredpropertycache
2230 def prepushoutgoinghooks(self):
2180 def prepushoutgoinghooks(self):
2231 """Return util.hooks consists of a pushop with repo, remote, outgoing
2181 """Return util.hooks consists of a pushop with repo, remote, outgoing
2232 methods, which are called before pushing changesets.
2182 methods, which are called before pushing changesets.
2233 """
2183 """
2234 return util.hooks()
2184 return util.hooks()
2235
2185
2236 def pushkey(self, namespace, key, old, new):
2186 def pushkey(self, namespace, key, old, new):
2237 try:
2187 try:
2238 tr = self.currenttransaction()
2188 tr = self.currenttransaction()
2239 hookargs = {}
2189 hookargs = {}
2240 if tr is not None:
2190 if tr is not None:
2241 hookargs.update(tr.hookargs)
2191 hookargs.update(tr.hookargs)
2242 hookargs['namespace'] = namespace
2192 hookargs['namespace'] = namespace
2243 hookargs['key'] = key
2193 hookargs['key'] = key
2244 hookargs['old'] = old
2194 hookargs['old'] = old
2245 hookargs['new'] = new
2195 hookargs['new'] = new
2246 self.hook('prepushkey', throw=True, **hookargs)
2196 self.hook('prepushkey', throw=True, **hookargs)
2247 except error.HookAbort as exc:
2197 except error.HookAbort as exc:
2248 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2198 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2249 if exc.hint:
2199 if exc.hint:
2250 self.ui.write_err(_("(%s)\n") % exc.hint)
2200 self.ui.write_err(_("(%s)\n") % exc.hint)
2251 return False
2201 return False
2252 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2202 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2253 ret = pushkey.push(self, namespace, key, old, new)
2203 ret = pushkey.push(self, namespace, key, old, new)
2254 def runhook():
2204 def runhook():
2255 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2205 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2256 ret=ret)
2206 ret=ret)
2257 self._afterlock(runhook)
2207 self._afterlock(runhook)
2258 return ret
2208 return ret
2259
2209
2260 def listkeys(self, namespace):
2210 def listkeys(self, namespace):
2261 self.hook('prelistkeys', throw=True, namespace=namespace)
2211 self.hook('prelistkeys', throw=True, namespace=namespace)
2262 self.ui.debug('listing keys for "%s"\n' % namespace)
2212 self.ui.debug('listing keys for "%s"\n' % namespace)
2263 values = pushkey.list(self, namespace)
2213 values = pushkey.list(self, namespace)
2264 self.hook('listkeys', namespace=namespace, values=values)
2214 self.hook('listkeys', namespace=namespace, values=values)
2265 return values
2215 return values
2266
2216
2267 def debugwireargs(self, one, two, three=None, four=None, five=None):
2217 def debugwireargs(self, one, two, three=None, four=None, five=None):
2268 '''used to test argument passing over the wire'''
2218 '''used to test argument passing over the wire'''
2269 return "%s %s %s %s %s" % (one, two, three, four, five)
2219 return "%s %s %s %s %s" % (one, two, three, four, five)
2270
2220
2271 def savecommitmessage(self, text):
2221 def savecommitmessage(self, text):
2272 fp = self.vfs('last-message.txt', 'wb')
2222 fp = self.vfs('last-message.txt', 'wb')
2273 try:
2223 try:
2274 fp.write(text)
2224 fp.write(text)
2275 finally:
2225 finally:
2276 fp.close()
2226 fp.close()
2277 return self.pathto(fp.name[len(self.root) + 1:])
2227 return self.pathto(fp.name[len(self.root) + 1:])
2278
2228
2279 # used to avoid circular references so destructors work
2229 # used to avoid circular references so destructors work
2280 def aftertrans(files):
2230 def aftertrans(files):
2281 renamefiles = [tuple(t) for t in files]
2231 renamefiles = [tuple(t) for t in files]
2282 def a():
2232 def a():
2283 for vfs, src, dest in renamefiles:
2233 for vfs, src, dest in renamefiles:
2284 # if src and dest refer to a same file, vfs.rename is a no-op,
2234 # if src and dest refer to a same file, vfs.rename is a no-op,
2285 # leaving both src and dest on disk. delete dest to make sure
2235 # leaving both src and dest on disk. delete dest to make sure
2286 # the rename couldn't be such a no-op.
2236 # the rename couldn't be such a no-op.
2287 vfs.tryunlink(dest)
2237 vfs.tryunlink(dest)
2288 try:
2238 try:
2289 vfs.rename(src, dest)
2239 vfs.rename(src, dest)
2290 except OSError: # journal file does not yet exist
2240 except OSError: # journal file does not yet exist
2291 pass
2241 pass
2292 return a
2242 return a
2293
2243
2294 def undoname(fn):
2244 def undoname(fn):
2295 base, name = os.path.split(fn)
2245 base, name = os.path.split(fn)
2296 assert name.startswith('journal')
2246 assert name.startswith('journal')
2297 return os.path.join(base, name.replace('journal', 'undo', 1))
2247 return os.path.join(base, name.replace('journal', 'undo', 1))
2298
2248
2299 def instance(ui, path, create):
2249 def instance(ui, path, create):
2300 return localrepository(ui, util.urllocalpath(path), create)
2250 return localrepository(ui, util.urllocalpath(path), create)
2301
2251
2302 def islocal(path):
2252 def islocal(path):
2303 return True
2253 return True
2304
2254
2305 def newreporequirements(repo):
2255 def newreporequirements(repo):
2306 """Determine the set of requirements for a new local repository.
2256 """Determine the set of requirements for a new local repository.
2307
2257
2308 Extensions can wrap this function to specify custom requirements for
2258 Extensions can wrap this function to specify custom requirements for
2309 new repositories.
2259 new repositories.
2310 """
2260 """
2311 ui = repo.ui
2261 ui = repo.ui
2312 requirements = {'revlogv1'}
2262 requirements = {'revlogv1'}
2313 if ui.configbool('format', 'usestore'):
2263 if ui.configbool('format', 'usestore'):
2314 requirements.add('store')
2264 requirements.add('store')
2315 if ui.configbool('format', 'usefncache'):
2265 if ui.configbool('format', 'usefncache'):
2316 requirements.add('fncache')
2266 requirements.add('fncache')
2317 if ui.configbool('format', 'dotencode'):
2267 if ui.configbool('format', 'dotencode'):
2318 requirements.add('dotencode')
2268 requirements.add('dotencode')
2319
2269
2320 compengine = ui.config('experimental', 'format.compression')
2270 compengine = ui.config('experimental', 'format.compression')
2321 if compengine not in util.compengines:
2271 if compengine not in util.compengines:
2322 raise error.Abort(_('compression engine %s defined by '
2272 raise error.Abort(_('compression engine %s defined by '
2323 'experimental.format.compression not available') %
2273 'experimental.format.compression not available') %
2324 compengine,
2274 compengine,
2325 hint=_('run "hg debuginstall" to list available '
2275 hint=_('run "hg debuginstall" to list available '
2326 'compression engines'))
2276 'compression engines'))
2327
2277
2328 # zlib is the historical default and doesn't need an explicit requirement.
2278 # zlib is the historical default and doesn't need an explicit requirement.
2329 if compengine != 'zlib':
2279 if compengine != 'zlib':
2330 requirements.add('exp-compression-%s' % compengine)
2280 requirements.add('exp-compression-%s' % compengine)
2331
2281
2332 if scmutil.gdinitconfig(ui):
2282 if scmutil.gdinitconfig(ui):
2333 requirements.add('generaldelta')
2283 requirements.add('generaldelta')
2334 if ui.configbool('experimental', 'treemanifest'):
2284 if ui.configbool('experimental', 'treemanifest'):
2335 requirements.add('treemanifest')
2285 requirements.add('treemanifest')
2336 if ui.configbool('experimental', 'manifestv2'):
2286 if ui.configbool('experimental', 'manifestv2'):
2337 requirements.add('manifestv2')
2287 requirements.add('manifestv2')
2338
2288
2339 revlogv2 = ui.config('experimental', 'revlogv2')
2289 revlogv2 = ui.config('experimental', 'revlogv2')
2340 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2290 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2341 requirements.remove('revlogv1')
2291 requirements.remove('revlogv1')
2342 # generaldelta is implied by revlogv2.
2292 # generaldelta is implied by revlogv2.
2343 requirements.discard('generaldelta')
2293 requirements.discard('generaldelta')
2344 requirements.add(REVLOGV2_REQUIREMENT)
2294 requirements.add(REVLOGV2_REQUIREMENT)
2345
2295
2346 return requirements
2296 return requirements
@@ -1,2063 +1,2128
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import stat
16 import stat
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import tarfile
19 import tarfile
20 import xml.dom.minidom
20 import xml.dom.minidom
21
21
22
22
23 from .i18n import _
23 from .i18n import _
24 from . import (
24 from . import (
25 cmdutil,
25 cmdutil,
26 config,
26 config,
27 encoding,
27 encoding,
28 error,
28 error,
29 exchange,
29 exchange,
30 filemerge,
30 filemerge,
31 match as matchmod,
31 match as matchmod,
32 node,
32 node,
33 pathutil,
33 pathutil,
34 phases,
34 phases,
35 pycompat,
35 pycompat,
36 scmutil,
36 scmutil,
37 util,
37 util,
38 vfs as vfsmod,
38 vfs as vfsmod,
39 )
39 )
40
40
41 hg = None
41 hg = None
42 propertycache = util.propertycache
42 propertycache = util.propertycache
43
43
44 nullstate = ('', '', 'empty')
44 nullstate = ('', '', 'empty')
45
45
46 def _expandedabspath(path):
46 def _expandedabspath(path):
47 '''
47 '''
48 get a path or url and if it is a path expand it and return an absolute path
48 get a path or url and if it is a path expand it and return an absolute path
49 '''
49 '''
50 expandedpath = util.urllocalpath(util.expandpath(path))
50 expandedpath = util.urllocalpath(util.expandpath(path))
51 u = util.url(expandedpath)
51 u = util.url(expandedpath)
52 if not u.scheme:
52 if not u.scheme:
53 path = util.normpath(os.path.abspath(u.path))
53 path = util.normpath(os.path.abspath(u.path))
54 return path
54 return path
55
55
56 def _getstorehashcachename(remotepath):
56 def _getstorehashcachename(remotepath):
57 '''get a unique filename for the store hash cache of a remote repository'''
57 '''get a unique filename for the store hash cache of a remote repository'''
58 return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
58 return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
59
59
60 class SubrepoAbort(error.Abort):
60 class SubrepoAbort(error.Abort):
61 """Exception class used to avoid handling a subrepo error more than once"""
61 """Exception class used to avoid handling a subrepo error more than once"""
62 def __init__(self, *args, **kw):
62 def __init__(self, *args, **kw):
63 self.subrepo = kw.pop('subrepo', None)
63 self.subrepo = kw.pop('subrepo', None)
64 self.cause = kw.pop('cause', None)
64 self.cause = kw.pop('cause', None)
65 error.Abort.__init__(self, *args, **kw)
65 error.Abort.__init__(self, *args, **kw)
66
66
67 def annotatesubrepoerror(func):
67 def annotatesubrepoerror(func):
68 def decoratedmethod(self, *args, **kargs):
68 def decoratedmethod(self, *args, **kargs):
69 try:
69 try:
70 res = func(self, *args, **kargs)
70 res = func(self, *args, **kargs)
71 except SubrepoAbort as ex:
71 except SubrepoAbort as ex:
72 # This exception has already been handled
72 # This exception has already been handled
73 raise ex
73 raise ex
74 except error.Abort as ex:
74 except error.Abort as ex:
75 subrepo = subrelpath(self)
75 subrepo = subrelpath(self)
76 errormsg = str(ex) + ' ' + _('(in subrepository "%s")') % subrepo
76 errormsg = str(ex) + ' ' + _('(in subrepository "%s")') % subrepo
77 # avoid handling this exception by raising a SubrepoAbort exception
77 # avoid handling this exception by raising a SubrepoAbort exception
78 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
78 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
79 cause=sys.exc_info())
79 cause=sys.exc_info())
80 return res
80 return res
81 return decoratedmethod
81 return decoratedmethod
82
82
83 def state(ctx, ui):
83 def state(ctx, ui):
84 """return a state dict, mapping subrepo paths configured in .hgsub
84 """return a state dict, mapping subrepo paths configured in .hgsub
85 to tuple: (source from .hgsub, revision from .hgsubstate, kind
85 to tuple: (source from .hgsub, revision from .hgsubstate, kind
86 (key in types dict))
86 (key in types dict))
87 """
87 """
88 p = config.config()
88 p = config.config()
89 repo = ctx.repo()
89 repo = ctx.repo()
90 def read(f, sections=None, remap=None):
90 def read(f, sections=None, remap=None):
91 if f in ctx:
91 if f in ctx:
92 try:
92 try:
93 data = ctx[f].data()
93 data = ctx[f].data()
94 except IOError as err:
94 except IOError as err:
95 if err.errno != errno.ENOENT:
95 if err.errno != errno.ENOENT:
96 raise
96 raise
97 # handle missing subrepo spec files as removed
97 # handle missing subrepo spec files as removed
98 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
98 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
99 repo.pathto(f))
99 repo.pathto(f))
100 return
100 return
101 p.parse(f, data, sections, remap, read)
101 p.parse(f, data, sections, remap, read)
102 else:
102 else:
103 raise error.Abort(_("subrepo spec file \'%s\' not found") %
103 raise error.Abort(_("subrepo spec file \'%s\' not found") %
104 repo.pathto(f))
104 repo.pathto(f))
105 if '.hgsub' in ctx:
105 if '.hgsub' in ctx:
106 read('.hgsub')
106 read('.hgsub')
107
107
108 for path, src in ui.configitems('subpaths'):
108 for path, src in ui.configitems('subpaths'):
109 p.set('subpaths', path, src, ui.configsource('subpaths', path))
109 p.set('subpaths', path, src, ui.configsource('subpaths', path))
110
110
111 rev = {}
111 rev = {}
112 if '.hgsubstate' in ctx:
112 if '.hgsubstate' in ctx:
113 try:
113 try:
114 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
114 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
115 l = l.lstrip()
115 l = l.lstrip()
116 if not l:
116 if not l:
117 continue
117 continue
118 try:
118 try:
119 revision, path = l.split(" ", 1)
119 revision, path = l.split(" ", 1)
120 except ValueError:
120 except ValueError:
121 raise error.Abort(_("invalid subrepository revision "
121 raise error.Abort(_("invalid subrepository revision "
122 "specifier in \'%s\' line %d")
122 "specifier in \'%s\' line %d")
123 % (repo.pathto('.hgsubstate'), (i + 1)))
123 % (repo.pathto('.hgsubstate'), (i + 1)))
124 rev[path] = revision
124 rev[path] = revision
125 except IOError as err:
125 except IOError as err:
126 if err.errno != errno.ENOENT:
126 if err.errno != errno.ENOENT:
127 raise
127 raise
128
128
129 def remap(src):
129 def remap(src):
130 for pattern, repl in p.items('subpaths'):
130 for pattern, repl in p.items('subpaths'):
131 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
131 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
132 # does a string decode.
132 # does a string decode.
133 repl = util.escapestr(repl)
133 repl = util.escapestr(repl)
134 # However, we still want to allow back references to go
134 # However, we still want to allow back references to go
135 # through unharmed, so we turn r'\\1' into r'\1'. Again,
135 # through unharmed, so we turn r'\\1' into r'\1'. Again,
136 # extra escapes are needed because re.sub string decodes.
136 # extra escapes are needed because re.sub string decodes.
137 repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl)
137 repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl)
138 try:
138 try:
139 src = re.sub(pattern, repl, src, 1)
139 src = re.sub(pattern, repl, src, 1)
140 except re.error as e:
140 except re.error as e:
141 raise error.Abort(_("bad subrepository pattern in %s: %s")
141 raise error.Abort(_("bad subrepository pattern in %s: %s")
142 % (p.source('subpaths', pattern), e))
142 % (p.source('subpaths', pattern), e))
143 return src
143 return src
144
144
145 state = {}
145 state = {}
146 for path, src in p[''].items():
146 for path, src in p[''].items():
147 kind = 'hg'
147 kind = 'hg'
148 if src.startswith('['):
148 if src.startswith('['):
149 if ']' not in src:
149 if ']' not in src:
150 raise error.Abort(_('missing ] in subrepository source'))
150 raise error.Abort(_('missing ] in subrepository source'))
151 kind, src = src.split(']', 1)
151 kind, src = src.split(']', 1)
152 kind = kind[1:]
152 kind = kind[1:]
153 src = src.lstrip() # strip any extra whitespace after ']'
153 src = src.lstrip() # strip any extra whitespace after ']'
154
154
155 if not util.url(src).isabs():
155 if not util.url(src).isabs():
156 parent = _abssource(repo, abort=False)
156 parent = _abssource(repo, abort=False)
157 if parent:
157 if parent:
158 parent = util.url(parent)
158 parent = util.url(parent)
159 parent.path = posixpath.join(parent.path or '', src)
159 parent.path = posixpath.join(parent.path or '', src)
160 parent.path = posixpath.normpath(parent.path)
160 parent.path = posixpath.normpath(parent.path)
161 joined = str(parent)
161 joined = str(parent)
162 # Remap the full joined path and use it if it changes,
162 # Remap the full joined path and use it if it changes,
163 # else remap the original source.
163 # else remap the original source.
164 remapped = remap(joined)
164 remapped = remap(joined)
165 if remapped == joined:
165 if remapped == joined:
166 src = remap(src)
166 src = remap(src)
167 else:
167 else:
168 src = remapped
168 src = remapped
169
169
170 src = remap(src)
170 src = remap(src)
171 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
171 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
172
172
173 return state
173 return state
174
174
175 def writestate(repo, state):
175 def writestate(repo, state):
176 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
176 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
177 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
177 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
178 if state[s][1] != nullstate[1]]
178 if state[s][1] != nullstate[1]]
179 repo.wwrite('.hgsubstate', ''.join(lines), '')
179 repo.wwrite('.hgsubstate', ''.join(lines), '')
180
180
181 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
181 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
182 """delegated from merge.applyupdates: merging of .hgsubstate file
182 """delegated from merge.applyupdates: merging of .hgsubstate file
183 in working context, merging context and ancestor context"""
183 in working context, merging context and ancestor context"""
184 if mctx == actx: # backwards?
184 if mctx == actx: # backwards?
185 actx = wctx.p1()
185 actx = wctx.p1()
186 s1 = wctx.substate
186 s1 = wctx.substate
187 s2 = mctx.substate
187 s2 = mctx.substate
188 sa = actx.substate
188 sa = actx.substate
189 sm = {}
189 sm = {}
190
190
191 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
191 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
192
192
193 def debug(s, msg, r=""):
193 def debug(s, msg, r=""):
194 if r:
194 if r:
195 r = "%s:%s:%s" % r
195 r = "%s:%s:%s" % r
196 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
196 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
197
197
198 promptssrc = filemerge.partextras(labels)
198 promptssrc = filemerge.partextras(labels)
199 for s, l in sorted(s1.iteritems()):
199 for s, l in sorted(s1.iteritems()):
200 prompts = None
200 prompts = None
201 a = sa.get(s, nullstate)
201 a = sa.get(s, nullstate)
202 ld = l # local state with possible dirty flag for compares
202 ld = l # local state with possible dirty flag for compares
203 if wctx.sub(s).dirty():
203 if wctx.sub(s).dirty():
204 ld = (l[0], l[1] + "+")
204 ld = (l[0], l[1] + "+")
205 if wctx == actx: # overwrite
205 if wctx == actx: # overwrite
206 a = ld
206 a = ld
207
207
208 prompts = promptssrc.copy()
208 prompts = promptssrc.copy()
209 prompts['s'] = s
209 prompts['s'] = s
210 if s in s2:
210 if s in s2:
211 r = s2[s]
211 r = s2[s]
212 if ld == r or r == a: # no change or local is newer
212 if ld == r or r == a: # no change or local is newer
213 sm[s] = l
213 sm[s] = l
214 continue
214 continue
215 elif ld == a: # other side changed
215 elif ld == a: # other side changed
216 debug(s, "other changed, get", r)
216 debug(s, "other changed, get", r)
217 wctx.sub(s).get(r, overwrite)
217 wctx.sub(s).get(r, overwrite)
218 sm[s] = r
218 sm[s] = r
219 elif ld[0] != r[0]: # sources differ
219 elif ld[0] != r[0]: # sources differ
220 prompts['lo'] = l[0]
220 prompts['lo'] = l[0]
221 prompts['ro'] = r[0]
221 prompts['ro'] = r[0]
222 if repo.ui.promptchoice(
222 if repo.ui.promptchoice(
223 _(' subrepository sources for %(s)s differ\n'
223 _(' subrepository sources for %(s)s differ\n'
224 'use (l)ocal%(l)s source (%(lo)s)'
224 'use (l)ocal%(l)s source (%(lo)s)'
225 ' or (r)emote%(o)s source (%(ro)s)?'
225 ' or (r)emote%(o)s source (%(ro)s)?'
226 '$$ &Local $$ &Remote') % prompts, 0):
226 '$$ &Local $$ &Remote') % prompts, 0):
227 debug(s, "prompt changed, get", r)
227 debug(s, "prompt changed, get", r)
228 wctx.sub(s).get(r, overwrite)
228 wctx.sub(s).get(r, overwrite)
229 sm[s] = r
229 sm[s] = r
230 elif ld[1] == a[1]: # local side is unchanged
230 elif ld[1] == a[1]: # local side is unchanged
231 debug(s, "other side changed, get", r)
231 debug(s, "other side changed, get", r)
232 wctx.sub(s).get(r, overwrite)
232 wctx.sub(s).get(r, overwrite)
233 sm[s] = r
233 sm[s] = r
234 else:
234 else:
235 debug(s, "both sides changed")
235 debug(s, "both sides changed")
236 srepo = wctx.sub(s)
236 srepo = wctx.sub(s)
237 prompts['sl'] = srepo.shortid(l[1])
237 prompts['sl'] = srepo.shortid(l[1])
238 prompts['sr'] = srepo.shortid(r[1])
238 prompts['sr'] = srepo.shortid(r[1])
239 option = repo.ui.promptchoice(
239 option = repo.ui.promptchoice(
240 _(' subrepository %(s)s diverged (local revision: %(sl)s, '
240 _(' subrepository %(s)s diverged (local revision: %(sl)s, '
241 'remote revision: %(sr)s)\n'
241 'remote revision: %(sr)s)\n'
242 '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
242 '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
243 '$$ &Merge $$ &Local $$ &Remote')
243 '$$ &Merge $$ &Local $$ &Remote')
244 % prompts, 0)
244 % prompts, 0)
245 if option == 0:
245 if option == 0:
246 wctx.sub(s).merge(r)
246 wctx.sub(s).merge(r)
247 sm[s] = l
247 sm[s] = l
248 debug(s, "merge with", r)
248 debug(s, "merge with", r)
249 elif option == 1:
249 elif option == 1:
250 sm[s] = l
250 sm[s] = l
251 debug(s, "keep local subrepo revision", l)
251 debug(s, "keep local subrepo revision", l)
252 else:
252 else:
253 wctx.sub(s).get(r, overwrite)
253 wctx.sub(s).get(r, overwrite)
254 sm[s] = r
254 sm[s] = r
255 debug(s, "get remote subrepo revision", r)
255 debug(s, "get remote subrepo revision", r)
256 elif ld == a: # remote removed, local unchanged
256 elif ld == a: # remote removed, local unchanged
257 debug(s, "remote removed, remove")
257 debug(s, "remote removed, remove")
258 wctx.sub(s).remove()
258 wctx.sub(s).remove()
259 elif a == nullstate: # not present in remote or ancestor
259 elif a == nullstate: # not present in remote or ancestor
260 debug(s, "local added, keep")
260 debug(s, "local added, keep")
261 sm[s] = l
261 sm[s] = l
262 continue
262 continue
263 else:
263 else:
264 if repo.ui.promptchoice(
264 if repo.ui.promptchoice(
265 _(' local%(l)s changed subrepository %(s)s'
265 _(' local%(l)s changed subrepository %(s)s'
266 ' which remote%(o)s removed\n'
266 ' which remote%(o)s removed\n'
267 'use (c)hanged version or (d)elete?'
267 'use (c)hanged version or (d)elete?'
268 '$$ &Changed $$ &Delete') % prompts, 0):
268 '$$ &Changed $$ &Delete') % prompts, 0):
269 debug(s, "prompt remove")
269 debug(s, "prompt remove")
270 wctx.sub(s).remove()
270 wctx.sub(s).remove()
271
271
272 for s, r in sorted(s2.items()):
272 for s, r in sorted(s2.items()):
273 prompts = None
273 prompts = None
274 if s in s1:
274 if s in s1:
275 continue
275 continue
276 elif s not in sa:
276 elif s not in sa:
277 debug(s, "remote added, get", r)
277 debug(s, "remote added, get", r)
278 mctx.sub(s).get(r)
278 mctx.sub(s).get(r)
279 sm[s] = r
279 sm[s] = r
280 elif r != sa[s]:
280 elif r != sa[s]:
281 prompts = promptssrc.copy()
281 prompts = promptssrc.copy()
282 prompts['s'] = s
282 prompts['s'] = s
283 if repo.ui.promptchoice(
283 if repo.ui.promptchoice(
284 _(' remote%(o)s changed subrepository %(s)s'
284 _(' remote%(o)s changed subrepository %(s)s'
285 ' which local%(l)s removed\n'
285 ' which local%(l)s removed\n'
286 'use (c)hanged version or (d)elete?'
286 'use (c)hanged version or (d)elete?'
287 '$$ &Changed $$ &Delete') % prompts, 0) == 0:
287 '$$ &Changed $$ &Delete') % prompts, 0) == 0:
288 debug(s, "prompt recreate", r)
288 debug(s, "prompt recreate", r)
289 mctx.sub(s).get(r)
289 mctx.sub(s).get(r)
290 sm[s] = r
290 sm[s] = r
291
291
292 # record merged .hgsubstate
292 # record merged .hgsubstate
293 writestate(repo, sm)
293 writestate(repo, sm)
294 return sm
294 return sm
295
295
296 def precommit(ui, wctx, status, match, force=False):
297 """Calculate .hgsubstate changes that should be applied before committing
298
299 Returns (subs, commitsubs, newstate) where
300 - subs: changed subrepos (including dirty ones)
301 - commitsubs: dirty subrepos which the caller needs to commit recursively
302 - newstate: new state dict which the caller must write to .hgsubstate
303
304 This also updates the given status argument.
305 """
306 subs = []
307 commitsubs = set()
308 newstate = wctx.substate.copy()
309
310 # only manage subrepos and .hgsubstate if .hgsub is present
311 if '.hgsub' in wctx:
312 # we'll decide whether to track this ourselves, thanks
313 for c in status.modified, status.added, status.removed:
314 if '.hgsubstate' in c:
315 c.remove('.hgsubstate')
316
317 # compare current state to last committed state
318 # build new substate based on last committed state
319 oldstate = wctx.p1().substate
320 for s in sorted(newstate.keys()):
321 if not match(s):
322 # ignore working copy, use old state if present
323 if s in oldstate:
324 newstate[s] = oldstate[s]
325 continue
326 if not force:
327 raise error.Abort(
328 _("commit with new subrepo %s excluded") % s)
329 dirtyreason = wctx.sub(s).dirtyreason(True)
330 if dirtyreason:
331 if not ui.configbool('ui', 'commitsubrepos'):
332 raise error.Abort(dirtyreason,
333 hint=_("use --subrepos for recursive commit"))
334 subs.append(s)
335 commitsubs.add(s)
336 else:
337 bs = wctx.sub(s).basestate()
338 newstate[s] = (newstate[s][0], bs, newstate[s][2])
339 if oldstate.get(s, (None, None, None))[1] != bs:
340 subs.append(s)
341
342 # check for removed subrepos
343 for p in wctx.parents():
344 r = [s for s in p.substate if s not in newstate]
345 subs += [s for s in r if match(s)]
346 if subs:
347 if (not match('.hgsub') and
348 '.hgsub' in (wctx.modified() + wctx.added())):
349 raise error.Abort(_("can't commit subrepos without .hgsub"))
350 status.modified.insert(0, '.hgsubstate')
351
352 elif '.hgsub' in status.removed:
353 # clean up .hgsubstate when .hgsub is removed
354 if ('.hgsubstate' in wctx and
355 '.hgsubstate' not in (status.modified + status.added +
356 status.removed)):
357 status.removed.insert(0, '.hgsubstate')
358
359 return subs, commitsubs, newstate
360
296 def _updateprompt(ui, sub, dirty, local, remote):
361 def _updateprompt(ui, sub, dirty, local, remote):
297 if dirty:
362 if dirty:
298 msg = (_(' subrepository sources for %s differ\n'
363 msg = (_(' subrepository sources for %s differ\n'
299 'use (l)ocal source (%s) or (r)emote source (%s)?'
364 'use (l)ocal source (%s) or (r)emote source (%s)?'
300 '$$ &Local $$ &Remote')
365 '$$ &Local $$ &Remote')
301 % (subrelpath(sub), local, remote))
366 % (subrelpath(sub), local, remote))
302 else:
367 else:
303 msg = (_(' subrepository sources for %s differ (in checked out '
368 msg = (_(' subrepository sources for %s differ (in checked out '
304 'version)\n'
369 'version)\n'
305 'use (l)ocal source (%s) or (r)emote source (%s)?'
370 'use (l)ocal source (%s) or (r)emote source (%s)?'
306 '$$ &Local $$ &Remote')
371 '$$ &Local $$ &Remote')
307 % (subrelpath(sub), local, remote))
372 % (subrelpath(sub), local, remote))
308 return ui.promptchoice(msg, 0)
373 return ui.promptchoice(msg, 0)
309
374
310 def reporelpath(repo):
375 def reporelpath(repo):
311 """return path to this (sub)repo as seen from outermost repo"""
376 """return path to this (sub)repo as seen from outermost repo"""
312 parent = repo
377 parent = repo
313 while util.safehasattr(parent, '_subparent'):
378 while util.safehasattr(parent, '_subparent'):
314 parent = parent._subparent
379 parent = parent._subparent
315 return repo.root[len(pathutil.normasprefix(parent.root)):]
380 return repo.root[len(pathutil.normasprefix(parent.root)):]
316
381
317 def subrelpath(sub):
382 def subrelpath(sub):
318 """return path to this subrepo as seen from outermost repo"""
383 """return path to this subrepo as seen from outermost repo"""
319 return sub._relpath
384 return sub._relpath
320
385
321 def _abssource(repo, push=False, abort=True):
386 def _abssource(repo, push=False, abort=True):
322 """return pull/push path of repo - either based on parent repo .hgsub info
387 """return pull/push path of repo - either based on parent repo .hgsub info
323 or on the top repo config. Abort or return None if no source found."""
388 or on the top repo config. Abort or return None if no source found."""
324 if util.safehasattr(repo, '_subparent'):
389 if util.safehasattr(repo, '_subparent'):
325 source = util.url(repo._subsource)
390 source = util.url(repo._subsource)
326 if source.isabs():
391 if source.isabs():
327 return str(source)
392 return str(source)
328 source.path = posixpath.normpath(source.path)
393 source.path = posixpath.normpath(source.path)
329 parent = _abssource(repo._subparent, push, abort=False)
394 parent = _abssource(repo._subparent, push, abort=False)
330 if parent:
395 if parent:
331 parent = util.url(util.pconvert(parent))
396 parent = util.url(util.pconvert(parent))
332 parent.path = posixpath.join(parent.path or '', source.path)
397 parent.path = posixpath.join(parent.path or '', source.path)
333 parent.path = posixpath.normpath(parent.path)
398 parent.path = posixpath.normpath(parent.path)
334 return str(parent)
399 return str(parent)
335 else: # recursion reached top repo
400 else: # recursion reached top repo
336 if util.safehasattr(repo, '_subtoppath'):
401 if util.safehasattr(repo, '_subtoppath'):
337 return repo._subtoppath
402 return repo._subtoppath
338 if push and repo.ui.config('paths', 'default-push'):
403 if push and repo.ui.config('paths', 'default-push'):
339 return repo.ui.config('paths', 'default-push')
404 return repo.ui.config('paths', 'default-push')
340 if repo.ui.config('paths', 'default'):
405 if repo.ui.config('paths', 'default'):
341 return repo.ui.config('paths', 'default')
406 return repo.ui.config('paths', 'default')
342 if repo.shared():
407 if repo.shared():
343 # chop off the .hg component to get the default path form
408 # chop off the .hg component to get the default path form
344 return os.path.dirname(repo.sharedpath)
409 return os.path.dirname(repo.sharedpath)
345 if abort:
410 if abort:
346 raise error.Abort(_("default path for subrepository not found"))
411 raise error.Abort(_("default path for subrepository not found"))
347
412
348 def _sanitize(ui, vfs, ignore):
413 def _sanitize(ui, vfs, ignore):
349 for dirname, dirs, names in vfs.walk():
414 for dirname, dirs, names in vfs.walk():
350 for i, d in enumerate(dirs):
415 for i, d in enumerate(dirs):
351 if d.lower() == ignore:
416 if d.lower() == ignore:
352 del dirs[i]
417 del dirs[i]
353 break
418 break
354 if vfs.basename(dirname).lower() != '.hg':
419 if vfs.basename(dirname).lower() != '.hg':
355 continue
420 continue
356 for f in names:
421 for f in names:
357 if f.lower() == 'hgrc':
422 if f.lower() == 'hgrc':
358 ui.warn(_("warning: removing potentially hostile 'hgrc' "
423 ui.warn(_("warning: removing potentially hostile 'hgrc' "
359 "in '%s'\n") % vfs.join(dirname))
424 "in '%s'\n") % vfs.join(dirname))
360 vfs.unlink(vfs.reljoin(dirname, f))
425 vfs.unlink(vfs.reljoin(dirname, f))
361
426
362 def _auditsubrepopath(repo, path):
427 def _auditsubrepopath(repo, path):
363 # auditor doesn't check if the path itself is a symlink
428 # auditor doesn't check if the path itself is a symlink
364 pathutil.pathauditor(repo.root)(path)
429 pathutil.pathauditor(repo.root)(path)
365 if repo.wvfs.islink(path):
430 if repo.wvfs.islink(path):
366 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
431 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
367
432
368 SUBREPO_ALLOWED_DEFAULTS = {
433 SUBREPO_ALLOWED_DEFAULTS = {
369 'hg': True,
434 'hg': True,
370 'git': False,
435 'git': False,
371 'svn': False,
436 'svn': False,
372 }
437 }
373
438
374 def _checktype(ui, kind):
439 def _checktype(ui, kind):
375 # subrepos.allowed is a master kill switch. If disabled, subrepos are
440 # subrepos.allowed is a master kill switch. If disabled, subrepos are
376 # disabled period.
441 # disabled period.
377 if not ui.configbool('subrepos', 'allowed', True):
442 if not ui.configbool('subrepos', 'allowed', True):
378 raise error.Abort(_('subrepos not enabled'),
443 raise error.Abort(_('subrepos not enabled'),
379 hint=_("see 'hg help config.subrepos' for details"))
444 hint=_("see 'hg help config.subrepos' for details"))
380
445
381 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
446 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
382 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
447 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
383 raise error.Abort(_('%s subrepos not allowed') % kind,
448 raise error.Abort(_('%s subrepos not allowed') % kind,
384 hint=_("see 'hg help config.subrepos' for details"))
449 hint=_("see 'hg help config.subrepos' for details"))
385
450
386 if kind not in types:
451 if kind not in types:
387 raise error.Abort(_('unknown subrepo type %s') % kind)
452 raise error.Abort(_('unknown subrepo type %s') % kind)
388
453
389 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
454 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
390 """return instance of the right subrepo class for subrepo in path"""
455 """return instance of the right subrepo class for subrepo in path"""
391 # subrepo inherently violates our import layering rules
456 # subrepo inherently violates our import layering rules
392 # because it wants to make repo objects from deep inside the stack
457 # because it wants to make repo objects from deep inside the stack
393 # so we manually delay the circular imports to not break
458 # so we manually delay the circular imports to not break
394 # scripts that don't use our demand-loading
459 # scripts that don't use our demand-loading
395 global hg
460 global hg
396 from . import hg as h
461 from . import hg as h
397 hg = h
462 hg = h
398
463
399 repo = ctx.repo()
464 repo = ctx.repo()
400 _auditsubrepopath(repo, path)
465 _auditsubrepopath(repo, path)
401 state = ctx.substate[path]
466 state = ctx.substate[path]
402 _checktype(repo.ui, state[2])
467 _checktype(repo.ui, state[2])
403 if allowwdir:
468 if allowwdir:
404 state = (state[0], ctx.subrev(path), state[2])
469 state = (state[0], ctx.subrev(path), state[2])
405 return types[state[2]](ctx, path, state[:2], allowcreate)
470 return types[state[2]](ctx, path, state[:2], allowcreate)
406
471
407 def nullsubrepo(ctx, path, pctx):
472 def nullsubrepo(ctx, path, pctx):
408 """return an empty subrepo in pctx for the extant subrepo in ctx"""
473 """return an empty subrepo in pctx for the extant subrepo in ctx"""
409 # subrepo inherently violates our import layering rules
474 # subrepo inherently violates our import layering rules
410 # because it wants to make repo objects from deep inside the stack
475 # because it wants to make repo objects from deep inside the stack
411 # so we manually delay the circular imports to not break
476 # so we manually delay the circular imports to not break
412 # scripts that don't use our demand-loading
477 # scripts that don't use our demand-loading
413 global hg
478 global hg
414 from . import hg as h
479 from . import hg as h
415 hg = h
480 hg = h
416
481
417 repo = ctx.repo()
482 repo = ctx.repo()
418 _auditsubrepopath(repo, path)
483 _auditsubrepopath(repo, path)
419 state = ctx.substate[path]
484 state = ctx.substate[path]
420 _checktype(repo.ui, state[2])
485 _checktype(repo.ui, state[2])
421 subrev = ''
486 subrev = ''
422 if state[2] == 'hg':
487 if state[2] == 'hg':
423 subrev = "0" * 40
488 subrev = "0" * 40
424 return types[state[2]](pctx, path, (state[0], subrev), True)
489 return types[state[2]](pctx, path, (state[0], subrev), True)
425
490
426 def newcommitphase(ui, ctx):
491 def newcommitphase(ui, ctx):
427 commitphase = phases.newcommitphase(ui)
492 commitphase = phases.newcommitphase(ui)
428 substate = getattr(ctx, "substate", None)
493 substate = getattr(ctx, "substate", None)
429 if not substate:
494 if not substate:
430 return commitphase
495 return commitphase
431 check = ui.config('phases', 'checksubrepos')
496 check = ui.config('phases', 'checksubrepos')
432 if check not in ('ignore', 'follow', 'abort'):
497 if check not in ('ignore', 'follow', 'abort'):
433 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
498 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
434 % (check))
499 % (check))
435 if check == 'ignore':
500 if check == 'ignore':
436 return commitphase
501 return commitphase
437 maxphase = phases.public
502 maxphase = phases.public
438 maxsub = None
503 maxsub = None
439 for s in sorted(substate):
504 for s in sorted(substate):
440 sub = ctx.sub(s)
505 sub = ctx.sub(s)
441 subphase = sub.phase(substate[s][1])
506 subphase = sub.phase(substate[s][1])
442 if maxphase < subphase:
507 if maxphase < subphase:
443 maxphase = subphase
508 maxphase = subphase
444 maxsub = s
509 maxsub = s
445 if commitphase < maxphase:
510 if commitphase < maxphase:
446 if check == 'abort':
511 if check == 'abort':
447 raise error.Abort(_("can't commit in %s phase"
512 raise error.Abort(_("can't commit in %s phase"
448 " conflicting %s from subrepository %s") %
513 " conflicting %s from subrepository %s") %
449 (phases.phasenames[commitphase],
514 (phases.phasenames[commitphase],
450 phases.phasenames[maxphase], maxsub))
515 phases.phasenames[maxphase], maxsub))
451 ui.warn(_("warning: changes are committed in"
516 ui.warn(_("warning: changes are committed in"
452 " %s phase from subrepository %s\n") %
517 " %s phase from subrepository %s\n") %
453 (phases.phasenames[maxphase], maxsub))
518 (phases.phasenames[maxphase], maxsub))
454 return maxphase
519 return maxphase
455 return commitphase
520 return commitphase
456
521
457 # subrepo classes need to implement the following abstract class:
522 # subrepo classes need to implement the following abstract class:
458
523
class abstractsubrepo(object):

    def __init__(self, ctx, path):
        """Set up state shared by every subrepo implementation.

        ``ctx`` is the changectx in the parent repository that refers
        to this subrepository.

        ``path`` is the location of this subrepository relative to the
        innermost repository.
        """
        self.ui = ctx.repo().ui
        self._ctx = ctx
        self._path = path

    def addwebdirpath(self, serverpath, webconf):
        """Register hgwebdir entries for this subrepo and its subrepos.

        ``serverpath`` is the path component of this repo's URL.

        ``webconf`` is the hgwebdir entry dictionary to fill in.
        """
        pass

    def storeclean(self, path):
        """Report whether the repository is unchanged since it was last
        cloned from, or pushed to, the repository at ``path``.
        """
        return False

    def dirty(self, ignoreupdate=False, missing=False):
        """Report whether the subrepo's dirstate is dirty or does not
        match the recorded state.  With ``ignoreupdate``, only check for
        uncommitted changes in the dirstate.  With ``missing``, deleted
        files also count as dirty.
        """
        raise NotImplementedError

    def dirtyreason(self, ignoreupdate=False, missing=False):
        """Return a human-readable reason when ``dirty()``, else None.

        The returned string carries enough information to be used in an
        exception message.
        """
        if not self.dirty(ignoreupdate=ignoreupdate, missing=missing):
            return None
        return _('uncommitted changes in subrepository "%s"'
                 ) % subrelpath(self)

    def bailifchanged(self, ignoreupdate=False, hint=None):
        """Raise Abort when the subrepository is ``dirty()``."""
        reason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
        if reason:
            raise error.Abort(reason, hint=hint)

    def basestate(self):
        """Base state of the working directory, ignoring .hgsubstate
        state and working directory modifications."""
        raise NotImplementedError

    def checknested(self, path):
        """Report whether ``path`` is a subrepository of this one."""
        return False

    def commit(self, text, user, date):
        """Commit the current changes to the subrepo with the given log
        message, using the given user and date when possible.  Return
        the new state of the subrepo.
        """
        raise NotImplementedError

    def phase(self, state):
        """Return the phase of ``state`` within the subrepository."""
        return phases.public

    def remove(self):
        """Delete the subrepo.

        (implementations should verify the dirstate is clean first)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """Run whatever commands are needed to move the subrepo into
        ``state``.
        """
        raise NotImplementedError

    def merge(self, state):
        """Merge the currently-saved state with the new ``state``."""
        raise NotImplementedError

    def push(self, opts):
        """Perform the equivalent of 'hg push' for this subrepo.

        May be a no-op for some subrepo kinds.
        """
        raise NotImplementedError

    def add(self, ui, match, prefix, explicitonly, **opts):
        return []

    def addremove(self, matcher, prefix, opts, dry_run, similarity):
        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
        return 1

    def cat(self, match, fm, fntemplate, prefix, **opts):
        return 1

    def status(self, rev2, **opts):
        return scmutil.status([], [], [], [], [], [], [])

    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        pass

    def outgoing(self, ui, dest, opts):
        return 1

    def incoming(self, ui, source, opts):
        return 1

    def files(self):
        """Return an iterator over tracked file names."""
        raise NotImplementedError

    def filedata(self, name, decode):
        """Return file content, optionally run through repo decoders."""
        raise NotImplementedError

    def fileflags(self, name):
        """Return the flags of the named file ('l', 'x', or '')."""
        return ''

    def getfileset(self, expr):
        """Resolve the fileset expression ``expr`` for this repo."""
        return set()

    def printfiles(self, ui, m, fm, fmt, subrepos):
        """Handle the files command for this subrepo."""
        return 1

    def archive(self, archiver, prefix, match=None, decode=True):
        if match is None:
            files = self.files()
        else:
            files = [f for f in self.files() if match(f)]
        total = len(files)
        relpath = subrelpath(self)
        self.ui.progress(_('archiving (%s)') % relpath, 0,
                         unit=_('files'), total=total)
        for idx, name in enumerate(files):
            flags = self.fileflags(name)
            # executable bit maps to 0o755, everything else to 0o644
            mode = 0o755 if 'x' in flags else 0o644
            symlink = 'l' in flags
            archiver.addfile(prefix + self._path + '/' + name,
                             mode, symlink, self.filedata(name, decode))
            self.ui.progress(_('archiving (%s)') % relpath, idx + 1,
                             unit=_('files'), total=total)
        self.ui.progress(_('archiving (%s)') % relpath, None)
        return total

    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''

    def forget(self, match, prefix):
        return ([], [])

    def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
        """Remove the matched files from the subrepository and the
        filesystem, possibly by force and/or after the file has been
        removed from the filesystem.  Return 0 on success, 1 on any
        warning.
        """
        warnings.append(_("warning: removefiles not implemented (%s)")
                        % self._path)
        return 1

    def revert(self, substate, *pats, **opts):
        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
                     % (substate[0], substate[2]))
        return []

    def shortid(self, revid):
        return revid

    def unshare(self):
        '''
        convert this repository from shared to normal storage.
        '''

    def verify(self):
        '''Verify the integrity of the repository.  Return 0 on success
        or warning, 1 on any error.
        '''
        return 0

    @propertycache
    def wvfs(self):
        """vfs rooted at the working directory of this subrepository."""
        return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))

    @propertycache
    def _relpath(self):
        """Path of this subrepository as seen from the outermost repo."""
        return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
673
738
674 class hgsubrepo(abstractsubrepo):
739 class hgsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        # ``state`` is the (source, revision, kind) tuple from .hgsubstate;
        # ``allowcreate`` gates on-the-fly creation of the subrepo.
        super(hgsubrepo, self).__init__(ctx, path)
        self._state = state
        r = ctx.repo()
        root = r.wjoin(path)
        # only create a fresh repo when allowed and none exists there yet
        create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
        self._repo = hg.repository(r.baseui, root, create=create)

        # Propagate the parent's --hidden option
        if r is r.unfiltered():
            self._repo = self._repo.unfiltered()

        self.ui = self._repo.ui
        # copy selected parent config values into the subrepo's ui
        for s, k in [('ui', 'commitsubrepos')]:
            v = r.ui.config(s, k)
            if v:
                self.ui.setconfig(s, k, v, 'subrepo')
        # internal config: ui._usedassubrepo
        self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
        self._initrepo(r, state[0], create)
695
760
696 @annotatesubrepoerror
761 @annotatesubrepoerror
697 def addwebdirpath(self, serverpath, webconf):
762 def addwebdirpath(self, serverpath, webconf):
698 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
763 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
699
764
700 def storeclean(self, path):
765 def storeclean(self, path):
701 with self._repo.lock():
766 with self._repo.lock():
702 return self._storeclean(path)
767 return self._storeclean(path)
703
768
704 def _storeclean(self, path):
769 def _storeclean(self, path):
705 clean = True
770 clean = True
706 itercache = self._calcstorehash(path)
771 itercache = self._calcstorehash(path)
707 for filehash in self._readstorehashcache(path):
772 for filehash in self._readstorehashcache(path):
708 if filehash != next(itercache, None):
773 if filehash != next(itercache, None):
709 clean = False
774 clean = False
710 break
775 break
711 if clean:
776 if clean:
712 # if not empty:
777 # if not empty:
713 # the cached and current pull states have a different size
778 # the cached and current pull states have a different size
714 clean = next(itercache, None) is None
779 clean = next(itercache, None) is None
715 return clean
780 return clean
716
781
717 def _calcstorehash(self, remotepath):
782 def _calcstorehash(self, remotepath):
718 '''calculate a unique "store hash"
783 '''calculate a unique "store hash"
719
784
720 This method is used to to detect when there are changes that may
785 This method is used to to detect when there are changes that may
721 require a push to a given remote path.'''
786 require a push to a given remote path.'''
722 # sort the files that will be hashed in increasing (likely) file size
787 # sort the files that will be hashed in increasing (likely) file size
723 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
788 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
724 yield '# %s\n' % _expandedabspath(remotepath)
789 yield '# %s\n' % _expandedabspath(remotepath)
725 vfs = self._repo.vfs
790 vfs = self._repo.vfs
726 for relname in filelist:
791 for relname in filelist:
727 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
792 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
728 yield '%s = %s\n' % (relname, filehash)
793 yield '%s = %s\n' % (relname, filehash)
729
794
730 @propertycache
795 @propertycache
731 def _cachestorehashvfs(self):
796 def _cachestorehashvfs(self):
732 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
797 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
733
798
734 def _readstorehashcache(self, remotepath):
799 def _readstorehashcache(self, remotepath):
735 '''read the store hash cache for a given remote repository'''
800 '''read the store hash cache for a given remote repository'''
736 cachefile = _getstorehashcachename(remotepath)
801 cachefile = _getstorehashcachename(remotepath)
737 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
802 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
738
803
739 def _cachestorehash(self, remotepath):
804 def _cachestorehash(self, remotepath):
740 '''cache the current store hash
805 '''cache the current store hash
741
806
742 Each remote repo requires its own store hash cache, because a subrepo
807 Each remote repo requires its own store hash cache, because a subrepo
743 store may be "clean" versus a given remote repo, but not versus another
808 store may be "clean" versus a given remote repo, but not versus another
744 '''
809 '''
745 cachefile = _getstorehashcachename(remotepath)
810 cachefile = _getstorehashcachename(remotepath)
746 with self._repo.lock():
811 with self._repo.lock():
747 storehash = list(self._calcstorehash(remotepath))
812 storehash = list(self._calcstorehash(remotepath))
748 vfs = self._cachestorehashvfs
813 vfs = self._cachestorehashvfs
749 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
814 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
750
815
751 def _getctx(self):
816 def _getctx(self):
752 '''fetch the context for this subrepo revision, possibly a workingctx
817 '''fetch the context for this subrepo revision, possibly a workingctx
753 '''
818 '''
754 if self._ctx.rev() is None:
819 if self._ctx.rev() is None:
755 return self._repo[None] # workingctx if parent is workingctx
820 return self._repo[None] # workingctx if parent is workingctx
756 else:
821 else:
757 rev = self._state[1]
822 rev = self._state[1]
758 return self._repo[rev]
823 return self._repo[rev]
759
824
    @annotatesubrepoerror
    def _initrepo(self, parentrepo, source, create):
        # link the subrepo back to its parent and record where it came from
        self._repo._subparent = parentrepo
        self._repo._subsource = source

        if create:
            # write an hgrc whose default/default-push paths point at the
            # location this subrepo was created from
            lines = ['[paths]\n']

            def addpathconfig(key, value):
                # record the path both in the hgrc being written and in
                # the live ui configuration
                if value:
                    lines.append('%s = %s\n' % (key, value))
                    self.ui.setconfig('paths', key, value, 'subrepo')

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            # only write default-push when it differs from default
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)

            fp = self._repo.vfs("hgrc", "w", text=True)
            try:
                fp.write(''.join(lines))
            finally:
                fp.close()
784
849
785 @annotatesubrepoerror
850 @annotatesubrepoerror
786 def add(self, ui, match, prefix, explicitonly, **opts):
851 def add(self, ui, match, prefix, explicitonly, **opts):
787 return cmdutil.add(ui, self._repo, match,
852 return cmdutil.add(ui, self._repo, match,
788 self.wvfs.reljoin(prefix, self._path),
853 self.wvfs.reljoin(prefix, self._path),
789 explicitonly, **opts)
854 explicitonly, **opts)
790
855
791 @annotatesubrepoerror
856 @annotatesubrepoerror
792 def addremove(self, m, prefix, opts, dry_run, similarity):
857 def addremove(self, m, prefix, opts, dry_run, similarity):
793 # In the same way as sub directories are processed, once in a subrepo,
858 # In the same way as sub directories are processed, once in a subrepo,
794 # always entry any of its subrepos. Don't corrupt the options that will
859 # always entry any of its subrepos. Don't corrupt the options that will
795 # be used to process sibling subrepos however.
860 # be used to process sibling subrepos however.
796 opts = copy.copy(opts)
861 opts = copy.copy(opts)
797 opts['subrepos'] = True
862 opts['subrepos'] = True
798 return scmutil.addremove(self._repo, m,
863 return scmutil.addremove(self._repo, m,
799 self.wvfs.reljoin(prefix, self._path), opts,
864 self.wvfs.reljoin(prefix, self._path), opts,
800 dry_run, similarity)
865 dry_run, similarity)
801
866
802 @annotatesubrepoerror
867 @annotatesubrepoerror
803 def cat(self, match, fm, fntemplate, prefix, **opts):
868 def cat(self, match, fm, fntemplate, prefix, **opts):
804 rev = self._state[1]
869 rev = self._state[1]
805 ctx = self._repo[rev]
870 ctx = self._repo[rev]
806 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
871 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
807 prefix, **opts)
872 prefix, **opts)
808
873
809 @annotatesubrepoerror
874 @annotatesubrepoerror
810 def status(self, rev2, **opts):
875 def status(self, rev2, **opts):
811 try:
876 try:
812 rev1 = self._state[1]
877 rev1 = self._state[1]
813 ctx1 = self._repo[rev1]
878 ctx1 = self._repo[rev1]
814 ctx2 = self._repo[rev2]
879 ctx2 = self._repo[rev2]
815 return self._repo.status(ctx1, ctx2, **opts)
880 return self._repo.status(ctx1, ctx2, **opts)
816 except error.RepoLookupError as inst:
881 except error.RepoLookupError as inst:
817 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
882 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
818 % (inst, subrelpath(self)))
883 % (inst, subrelpath(self)))
819 return scmutil.status([], [], [], [], [], [], [])
884 return scmutil.status([], [], [], [], [], [], [])
820
885
821 @annotatesubrepoerror
886 @annotatesubrepoerror
822 def diff(self, ui, diffopts, node2, match, prefix, **opts):
887 def diff(self, ui, diffopts, node2, match, prefix, **opts):
823 try:
888 try:
824 node1 = node.bin(self._state[1])
889 node1 = node.bin(self._state[1])
825 # We currently expect node2 to come from substate and be
890 # We currently expect node2 to come from substate and be
826 # in hex format
891 # in hex format
827 if node2 is not None:
892 if node2 is not None:
828 node2 = node.bin(node2)
893 node2 = node.bin(node2)
829 cmdutil.diffordiffstat(ui, self._repo, diffopts,
894 cmdutil.diffordiffstat(ui, self._repo, diffopts,
830 node1, node2, match,
895 node1, node2, match,
831 prefix=posixpath.join(prefix, self._path),
896 prefix=posixpath.join(prefix, self._path),
832 listsubrepos=True, **opts)
897 listsubrepos=True, **opts)
833 except error.RepoLookupError as inst:
898 except error.RepoLookupError as inst:
834 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
899 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
835 % (inst, subrelpath(self)))
900 % (inst, subrelpath(self)))
836
901
837 @annotatesubrepoerror
902 @annotatesubrepoerror
838 def archive(self, archiver, prefix, match=None, decode=True):
903 def archive(self, archiver, prefix, match=None, decode=True):
839 self._get(self._state + ('hg',))
904 self._get(self._state + ('hg',))
840 total = abstractsubrepo.archive(self, archiver, prefix, match)
905 total = abstractsubrepo.archive(self, archiver, prefix, match)
841 rev = self._state[1]
906 rev = self._state[1]
842 ctx = self._repo[rev]
907 ctx = self._repo[rev]
843 for subpath in ctx.substate:
908 for subpath in ctx.substate:
844 s = subrepo(ctx, subpath, True)
909 s = subrepo(ctx, subpath, True)
845 submatch = matchmod.subdirmatcher(subpath, match)
910 submatch = matchmod.subdirmatcher(subpath, match)
846 total += s.archive(archiver, prefix + self._path + '/', submatch,
911 total += s.archive(archiver, prefix + self._path + '/', submatch,
847 decode)
912 decode)
848 return total
913 return total
849
914
850 @annotatesubrepoerror
915 @annotatesubrepoerror
851 def dirty(self, ignoreupdate=False, missing=False):
916 def dirty(self, ignoreupdate=False, missing=False):
852 r = self._state[1]
917 r = self._state[1]
853 if r == '' and not ignoreupdate: # no state recorded
918 if r == '' and not ignoreupdate: # no state recorded
854 return True
919 return True
855 w = self._repo[None]
920 w = self._repo[None]
856 if r != w.p1().hex() and not ignoreupdate:
921 if r != w.p1().hex() and not ignoreupdate:
857 # different version checked out
922 # different version checked out
858 return True
923 return True
859 return w.dirty(missing=missing) # working directory changed
924 return w.dirty(missing=missing) # working directory changed
860
925
861 def basestate(self):
926 def basestate(self):
862 return self._repo['.'].hex()
927 return self._repo['.'].hex()
863
928
864 def checknested(self, path):
929 def checknested(self, path):
865 return self._repo._checknested(self._repo.wjoin(path))
930 return self._repo._checknested(self._repo.wjoin(path))
866
931
867 @annotatesubrepoerror
932 @annotatesubrepoerror
868 def commit(self, text, user, date):
933 def commit(self, text, user, date):
869 # don't bother committing in the subrepo if it's only been
934 # don't bother committing in the subrepo if it's only been
870 # updated
935 # updated
871 if not self.dirty(True):
936 if not self.dirty(True):
872 return self._repo['.'].hex()
937 return self._repo['.'].hex()
873 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
938 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
874 n = self._repo.commit(text, user, date)
939 n = self._repo.commit(text, user, date)
875 if not n:
940 if not n:
876 return self._repo['.'].hex() # different version checked out
941 return self._repo['.'].hex() # different version checked out
877 return node.hex(n)
942 return node.hex(n)
878
943
879 @annotatesubrepoerror
944 @annotatesubrepoerror
880 def phase(self, state):
945 def phase(self, state):
881 return self._repo[state].phase()
946 return self._repo[state].phase()
882
947
883 @annotatesubrepoerror
948 @annotatesubrepoerror
884 def remove(self):
949 def remove(self):
885 # we can't fully delete the repository as it may contain
950 # we can't fully delete the repository as it may contain
886 # local-only history
951 # local-only history
887 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
952 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
888 hg.clean(self._repo, node.nullid, False)
953 hg.clean(self._repo, node.nullid, False)
889
954
    def _get(self, state):
        # Make sure the revision named by ``state`` is available locally,
        # sharing/cloning/pulling from the source as needed.  Returns True
        # when the revision was already present, False when it had to be
        # fetched over the wire.
        source, revision, kind = state
        parentrepo = self._repo._subparent

        if revision in self._repo.unfiltered():
            # Allow shared subrepos tracked at null to setup the sharedpath
            if len(self._repo) != 0 or not parentrepo.shared():
                return True
        self._repo._subsource = source
        srcurl = _abssource(self._repo)
        other = hg.peer(self._repo, {}, srcurl)
        if len(self._repo) == 0:
            # empty local repo: replace it with a share or a full clone
            # use self._repo.vfs instead of self.wvfs to remove .hg only
            self._repo.vfs.rmtree()
            if parentrepo.shared():
                self.ui.status(_('sharing subrepo %s from %s\n')
                               % (subrelpath(self), srcurl))
                shared = hg.share(self._repo._subparent.baseui,
                                  other, self._repo.root,
                                  update=False, bookmarks=False)
                self._repo = shared.local()
            else:
                self.ui.status(_('cloning subrepo %s from %s\n')
                               % (subrelpath(self), srcurl))
                other, cloned = hg.clone(self._repo._subparent.baseui, {},
                                         other, self._repo.root,
                                         update=False)
                self._repo = cloned.local()
            self._initrepo(parentrepo, source, create=True)
            self._cachestorehash(srcurl)
        else:
            self.ui.status(_('pulling subrepo %s from %s\n')
                           % (subrelpath(self), srcurl))
            # remember whether the store was clean before the pull so the
            # cached hash can be refreshed afterwards
            cleansub = self.storeclean(srcurl)
            exchange.pull(self._repo, other)
            if cleansub:
                # keep the repo clean after pull
                self._cachestorehash(srcurl)
        return False
929
994
930 @annotatesubrepoerror
995 @annotatesubrepoerror
931 def get(self, state, overwrite=False):
996 def get(self, state, overwrite=False):
932 inrepo = self._get(state)
997 inrepo = self._get(state)
933 source, revision, kind = state
998 source, revision, kind = state
934 repo = self._repo
999 repo = self._repo
935 repo.ui.debug("getting subrepo %s\n" % self._path)
1000 repo.ui.debug("getting subrepo %s\n" % self._path)
936 if inrepo:
1001 if inrepo:
937 urepo = repo.unfiltered()
1002 urepo = repo.unfiltered()
938 ctx = urepo[revision]
1003 ctx = urepo[revision]
939 if ctx.hidden():
1004 if ctx.hidden():
940 urepo.ui.warn(
1005 urepo.ui.warn(
941 _('revision %s in subrepository "%s" is hidden\n') \
1006 _('revision %s in subrepository "%s" is hidden\n') \
942 % (revision[0:12], self._path))
1007 % (revision[0:12], self._path))
943 repo = urepo
1008 repo = urepo
944 hg.updaterepo(repo, revision, overwrite)
1009 hg.updaterepo(repo, revision, overwrite)
945
1010
946 @annotatesubrepoerror
1011 @annotatesubrepoerror
947 def merge(self, state):
1012 def merge(self, state):
948 self._get(state)
1013 self._get(state)
949 cur = self._repo['.']
1014 cur = self._repo['.']
950 dst = self._repo[state[1]]
1015 dst = self._repo[state[1]]
951 anc = dst.ancestor(cur)
1016 anc = dst.ancestor(cur)
952
1017
953 def mergefunc():
1018 def mergefunc():
954 if anc == cur and dst.branch() == cur.branch():
1019 if anc == cur and dst.branch() == cur.branch():
955 self.ui.debug('updating subrepository "%s"\n'
1020 self.ui.debug('updating subrepository "%s"\n'
956 % subrelpath(self))
1021 % subrelpath(self))
957 hg.update(self._repo, state[1])
1022 hg.update(self._repo, state[1])
958 elif anc == dst:
1023 elif anc == dst:
959 self.ui.debug('skipping subrepository "%s"\n'
1024 self.ui.debug('skipping subrepository "%s"\n'
960 % subrelpath(self))
1025 % subrelpath(self))
961 else:
1026 else:
962 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
1027 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
963 hg.merge(self._repo, state[1], remind=False)
1028 hg.merge(self._repo, state[1], remind=False)
964
1029
965 wctx = self._repo[None]
1030 wctx = self._repo[None]
966 if self.dirty():
1031 if self.dirty():
967 if anc != dst:
1032 if anc != dst:
968 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
1033 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
969 mergefunc()
1034 mergefunc()
970 else:
1035 else:
971 mergefunc()
1036 mergefunc()
972 else:
1037 else:
973 mergefunc()
1038 mergefunc()
974
1039
975 @annotatesubrepoerror
1040 @annotatesubrepoerror
976 def push(self, opts):
1041 def push(self, opts):
977 force = opts.get('force')
1042 force = opts.get('force')
978 newbranch = opts.get('new_branch')
1043 newbranch = opts.get('new_branch')
979 ssh = opts.get('ssh')
1044 ssh = opts.get('ssh')
980
1045
981 # push subrepos depth-first for coherent ordering
1046 # push subrepos depth-first for coherent ordering
982 c = self._repo['']
1047 c = self._repo['']
983 subs = c.substate # only repos that are committed
1048 subs = c.substate # only repos that are committed
984 for s in sorted(subs):
1049 for s in sorted(subs):
985 if c.sub(s).push(opts) == 0:
1050 if c.sub(s).push(opts) == 0:
986 return False
1051 return False
987
1052
988 dsturl = _abssource(self._repo, True)
1053 dsturl = _abssource(self._repo, True)
989 if not force:
1054 if not force:
990 if self.storeclean(dsturl):
1055 if self.storeclean(dsturl):
991 self.ui.status(
1056 self.ui.status(
992 _('no changes made to subrepo %s since last push to %s\n')
1057 _('no changes made to subrepo %s since last push to %s\n')
993 % (subrelpath(self), dsturl))
1058 % (subrelpath(self), dsturl))
994 return None
1059 return None
995 self.ui.status(_('pushing subrepo %s to %s\n') %
1060 self.ui.status(_('pushing subrepo %s to %s\n') %
996 (subrelpath(self), dsturl))
1061 (subrelpath(self), dsturl))
997 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
1062 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
998 res = exchange.push(self._repo, other, force, newbranch=newbranch)
1063 res = exchange.push(self._repo, other, force, newbranch=newbranch)
999
1064
1000 # the repo is now clean
1065 # the repo is now clean
1001 self._cachestorehash(dsturl)
1066 self._cachestorehash(dsturl)
1002 return res.cgresult
1067 return res.cgresult
1003
1068
1004 @annotatesubrepoerror
1069 @annotatesubrepoerror
1005 def outgoing(self, ui, dest, opts):
1070 def outgoing(self, ui, dest, opts):
1006 if 'rev' in opts or 'branch' in opts:
1071 if 'rev' in opts or 'branch' in opts:
1007 opts = copy.copy(opts)
1072 opts = copy.copy(opts)
1008 opts.pop('rev', None)
1073 opts.pop('rev', None)
1009 opts.pop('branch', None)
1074 opts.pop('branch', None)
1010 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
1075 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
1011
1076
1012 @annotatesubrepoerror
1077 @annotatesubrepoerror
1013 def incoming(self, ui, source, opts):
1078 def incoming(self, ui, source, opts):
1014 if 'rev' in opts or 'branch' in opts:
1079 if 'rev' in opts or 'branch' in opts:
1015 opts = copy.copy(opts)
1080 opts = copy.copy(opts)
1016 opts.pop('rev', None)
1081 opts.pop('rev', None)
1017 opts.pop('branch', None)
1082 opts.pop('branch', None)
1018 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
1083 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
1019
1084
1020 @annotatesubrepoerror
1085 @annotatesubrepoerror
1021 def files(self):
1086 def files(self):
1022 rev = self._state[1]
1087 rev = self._state[1]
1023 ctx = self._repo[rev]
1088 ctx = self._repo[rev]
1024 return ctx.manifest().keys()
1089 return ctx.manifest().keys()
1025
1090
1026 def filedata(self, name, decode):
1091 def filedata(self, name, decode):
1027 rev = self._state[1]
1092 rev = self._state[1]
1028 data = self._repo[rev][name].data()
1093 data = self._repo[rev][name].data()
1029 if decode:
1094 if decode:
1030 data = self._repo.wwritedata(name, data)
1095 data = self._repo.wwritedata(name, data)
1031 return data
1096 return data
1032
1097
1033 def fileflags(self, name):
1098 def fileflags(self, name):
1034 rev = self._state[1]
1099 rev = self._state[1]
1035 ctx = self._repo[rev]
1100 ctx = self._repo[rev]
1036 return ctx.flags(name)
1101 return ctx.flags(name)
1037
1102
1038 @annotatesubrepoerror
1103 @annotatesubrepoerror
1039 def printfiles(self, ui, m, fm, fmt, subrepos):
1104 def printfiles(self, ui, m, fm, fmt, subrepos):
1040 # If the parent context is a workingctx, use the workingctx here for
1105 # If the parent context is a workingctx, use the workingctx here for
1041 # consistency.
1106 # consistency.
1042 if self._ctx.rev() is None:
1107 if self._ctx.rev() is None:
1043 ctx = self._repo[None]
1108 ctx = self._repo[None]
1044 else:
1109 else:
1045 rev = self._state[1]
1110 rev = self._state[1]
1046 ctx = self._repo[rev]
1111 ctx = self._repo[rev]
1047 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
1112 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
1048
1113
1049 @annotatesubrepoerror
1114 @annotatesubrepoerror
1050 def getfileset(self, expr):
1115 def getfileset(self, expr):
1051 if self._ctx.rev() is None:
1116 if self._ctx.rev() is None:
1052 ctx = self._repo[None]
1117 ctx = self._repo[None]
1053 else:
1118 else:
1054 rev = self._state[1]
1119 rev = self._state[1]
1055 ctx = self._repo[rev]
1120 ctx = self._repo[rev]
1056
1121
1057 files = ctx.getfileset(expr)
1122 files = ctx.getfileset(expr)
1058
1123
1059 for subpath in ctx.substate:
1124 for subpath in ctx.substate:
1060 sub = ctx.sub(subpath)
1125 sub = ctx.sub(subpath)
1061
1126
1062 try:
1127 try:
1063 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
1128 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
1064 except error.LookupError:
1129 except error.LookupError:
1065 self.ui.status(_("skipping missing subrepository: %s\n")
1130 self.ui.status(_("skipping missing subrepository: %s\n")
1066 % self.wvfs.reljoin(reporelpath(self), subpath))
1131 % self.wvfs.reljoin(reporelpath(self), subpath))
1067 return files
1132 return files
1068
1133
1069 def walk(self, match):
1134 def walk(self, match):
1070 ctx = self._repo[None]
1135 ctx = self._repo[None]
1071 return ctx.walk(match)
1136 return ctx.walk(match)
1072
1137
1073 @annotatesubrepoerror
1138 @annotatesubrepoerror
1074 def forget(self, match, prefix):
1139 def forget(self, match, prefix):
1075 return cmdutil.forget(self.ui, self._repo, match,
1140 return cmdutil.forget(self.ui, self._repo, match,
1076 self.wvfs.reljoin(prefix, self._path), True)
1141 self.wvfs.reljoin(prefix, self._path), True)
1077
1142
1078 @annotatesubrepoerror
1143 @annotatesubrepoerror
1079 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1144 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1080 return cmdutil.remove(self.ui, self._repo, matcher,
1145 return cmdutil.remove(self.ui, self._repo, matcher,
1081 self.wvfs.reljoin(prefix, self._path),
1146 self.wvfs.reljoin(prefix, self._path),
1082 after, force, subrepos)
1147 after, force, subrepos)
1083
1148
1084 @annotatesubrepoerror
1149 @annotatesubrepoerror
1085 def revert(self, substate, *pats, **opts):
1150 def revert(self, substate, *pats, **opts):
1086 # reverting a subrepo is a 2 step process:
1151 # reverting a subrepo is a 2 step process:
1087 # 1. if the no_backup is not set, revert all modified
1152 # 1. if the no_backup is not set, revert all modified
1088 # files inside the subrepo
1153 # files inside the subrepo
1089 # 2. update the subrepo to the revision specified in
1154 # 2. update the subrepo to the revision specified in
1090 # the corresponding substate dictionary
1155 # the corresponding substate dictionary
1091 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1156 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1092 if not opts.get('no_backup'):
1157 if not opts.get('no_backup'):
1093 # Revert all files on the subrepo, creating backups
1158 # Revert all files on the subrepo, creating backups
1094 # Note that this will not recursively revert subrepos
1159 # Note that this will not recursively revert subrepos
1095 # We could do it if there was a set:subrepos() predicate
1160 # We could do it if there was a set:subrepos() predicate
1096 opts = opts.copy()
1161 opts = opts.copy()
1097 opts['date'] = None
1162 opts['date'] = None
1098 opts['rev'] = substate[1]
1163 opts['rev'] = substate[1]
1099
1164
1100 self.filerevert(*pats, **opts)
1165 self.filerevert(*pats, **opts)
1101
1166
1102 # Update the repo to the revision specified in the given substate
1167 # Update the repo to the revision specified in the given substate
1103 if not opts.get('dry_run'):
1168 if not opts.get('dry_run'):
1104 self.get(substate, overwrite=True)
1169 self.get(substate, overwrite=True)
1105
1170
1106 def filerevert(self, *pats, **opts):
1171 def filerevert(self, *pats, **opts):
1107 ctx = self._repo[opts['rev']]
1172 ctx = self._repo[opts['rev']]
1108 parents = self._repo.dirstate.parents()
1173 parents = self._repo.dirstate.parents()
1109 if opts.get('all'):
1174 if opts.get('all'):
1110 pats = ['set:modified()']
1175 pats = ['set:modified()']
1111 else:
1176 else:
1112 pats = []
1177 pats = []
1113 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1178 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1114
1179
1115 def shortid(self, revid):
1180 def shortid(self, revid):
1116 return revid[:12]
1181 return revid[:12]
1117
1182
1118 @annotatesubrepoerror
1183 @annotatesubrepoerror
1119 def unshare(self):
1184 def unshare(self):
1120 # subrepo inherently violates our import layering rules
1185 # subrepo inherently violates our import layering rules
1121 # because it wants to make repo objects from deep inside the stack
1186 # because it wants to make repo objects from deep inside the stack
1122 # so we manually delay the circular imports to not break
1187 # so we manually delay the circular imports to not break
1123 # scripts that don't use our demand-loading
1188 # scripts that don't use our demand-loading
1124 global hg
1189 global hg
1125 from . import hg as h
1190 from . import hg as h
1126 hg = h
1191 hg = h
1127
1192
1128 # Nothing prevents a user from sharing in a repo, and then making that a
1193 # Nothing prevents a user from sharing in a repo, and then making that a
1129 # subrepo. Alternately, the previous unshare attempt may have failed
1194 # subrepo. Alternately, the previous unshare attempt may have failed
1130 # part way through. So recurse whether or not this layer is shared.
1195 # part way through. So recurse whether or not this layer is shared.
1131 if self._repo.shared():
1196 if self._repo.shared():
1132 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
1197 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
1133
1198
1134 hg.unshare(self.ui, self._repo)
1199 hg.unshare(self.ui, self._repo)
1135
1200
1136 def verify(self):
1201 def verify(self):
1137 try:
1202 try:
1138 rev = self._state[1]
1203 rev = self._state[1]
1139 ctx = self._repo.unfiltered()[rev]
1204 ctx = self._repo.unfiltered()[rev]
1140 if ctx.hidden():
1205 if ctx.hidden():
1141 # Since hidden revisions aren't pushed/pulled, it seems worth an
1206 # Since hidden revisions aren't pushed/pulled, it seems worth an
1142 # explicit warning.
1207 # explicit warning.
1143 ui = self._repo.ui
1208 ui = self._repo.ui
1144 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1209 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1145 (self._relpath, node.short(self._ctx.node())))
1210 (self._relpath, node.short(self._ctx.node())))
1146 return 0
1211 return 0
1147 except error.RepoLookupError:
1212 except error.RepoLookupError:
1148 # A missing subrepo revision may be a case of needing to pull it, so
1213 # A missing subrepo revision may be a case of needing to pull it, so
1149 # don't treat this as an error.
1214 # don't treat this as an error.
1150 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1215 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1151 (self._relpath, node.short(self._ctx.node())))
1216 (self._relpath, node.short(self._ctx.node())))
1152 return 0
1217 return 0
1153
1218
1154 @propertycache
1219 @propertycache
1155 def wvfs(self):
1220 def wvfs(self):
1156 """return own wvfs for efficiency and consistency
1221 """return own wvfs for efficiency and consistency
1157 """
1222 """
1158 return self._repo.wvfs
1223 return self._repo.wvfs
1159
1224
1160 @propertycache
1225 @propertycache
1161 def _relpath(self):
1226 def _relpath(self):
1162 """return path to this subrepository as seen from outermost repository
1227 """return path to this subrepository as seen from outermost repository
1163 """
1228 """
1164 # Keep consistent dir separators by avoiding vfs.join(self._path)
1229 # Keep consistent dir separators by avoiding vfs.join(self._path)
1165 return reporelpath(self._repo)
1230 return reporelpath(self._repo)
1166
1231
1167 class svnsubrepo(abstractsubrepo):
1232 class svnsubrepo(abstractsubrepo):
1168 def __init__(self, ctx, path, state, allowcreate):
1233 def __init__(self, ctx, path, state, allowcreate):
1169 super(svnsubrepo, self).__init__(ctx, path)
1234 super(svnsubrepo, self).__init__(ctx, path)
1170 self._state = state
1235 self._state = state
1171 self._exe = util.findexe('svn')
1236 self._exe = util.findexe('svn')
1172 if not self._exe:
1237 if not self._exe:
1173 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1238 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1174 % self._path)
1239 % self._path)
1175
1240
1176 def _svncommand(self, commands, filename='', failok=False):
1241 def _svncommand(self, commands, filename='', failok=False):
1177 cmd = [self._exe]
1242 cmd = [self._exe]
1178 extrakw = {}
1243 extrakw = {}
1179 if not self.ui.interactive():
1244 if not self.ui.interactive():
1180 # Making stdin be a pipe should prevent svn from behaving
1245 # Making stdin be a pipe should prevent svn from behaving
1181 # interactively even if we can't pass --non-interactive.
1246 # interactively even if we can't pass --non-interactive.
1182 extrakw['stdin'] = subprocess.PIPE
1247 extrakw['stdin'] = subprocess.PIPE
1183 # Starting in svn 1.5 --non-interactive is a global flag
1248 # Starting in svn 1.5 --non-interactive is a global flag
1184 # instead of being per-command, but we need to support 1.4 so
1249 # instead of being per-command, but we need to support 1.4 so
1185 # we have to be intelligent about what commands take
1250 # we have to be intelligent about what commands take
1186 # --non-interactive.
1251 # --non-interactive.
1187 if commands[0] in ('update', 'checkout', 'commit'):
1252 if commands[0] in ('update', 'checkout', 'commit'):
1188 cmd.append('--non-interactive')
1253 cmd.append('--non-interactive')
1189 cmd.extend(commands)
1254 cmd.extend(commands)
1190 if filename is not None:
1255 if filename is not None:
1191 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1256 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1192 self._path, filename)
1257 self._path, filename)
1193 cmd.append(path)
1258 cmd.append(path)
1194 env = dict(encoding.environ)
1259 env = dict(encoding.environ)
1195 # Avoid localized output, preserve current locale for everything else.
1260 # Avoid localized output, preserve current locale for everything else.
1196 lc_all = env.get('LC_ALL')
1261 lc_all = env.get('LC_ALL')
1197 if lc_all:
1262 if lc_all:
1198 env['LANG'] = lc_all
1263 env['LANG'] = lc_all
1199 del env['LC_ALL']
1264 del env['LC_ALL']
1200 env['LC_MESSAGES'] = 'C'
1265 env['LC_MESSAGES'] = 'C'
1201 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1266 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1202 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1267 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1203 universal_newlines=True, env=env, **extrakw)
1268 universal_newlines=True, env=env, **extrakw)
1204 stdout, stderr = p.communicate()
1269 stdout, stderr = p.communicate()
1205 stderr = stderr.strip()
1270 stderr = stderr.strip()
1206 if not failok:
1271 if not failok:
1207 if p.returncode:
1272 if p.returncode:
1208 raise error.Abort(stderr or 'exited with code %d'
1273 raise error.Abort(stderr or 'exited with code %d'
1209 % p.returncode)
1274 % p.returncode)
1210 if stderr:
1275 if stderr:
1211 self.ui.warn(stderr + '\n')
1276 self.ui.warn(stderr + '\n')
1212 return stdout, stderr
1277 return stdout, stderr
1213
1278
1214 @propertycache
1279 @propertycache
1215 def _svnversion(self):
1280 def _svnversion(self):
1216 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1281 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1217 m = re.search(br'^(\d+)\.(\d+)', output)
1282 m = re.search(br'^(\d+)\.(\d+)', output)
1218 if not m:
1283 if not m:
1219 raise error.Abort(_('cannot retrieve svn tool version'))
1284 raise error.Abort(_('cannot retrieve svn tool version'))
1220 return (int(m.group(1)), int(m.group(2)))
1285 return (int(m.group(1)), int(m.group(2)))
1221
1286
1222 def _wcrevs(self):
1287 def _wcrevs(self):
1223 # Get the working directory revision as well as the last
1288 # Get the working directory revision as well as the last
1224 # commit revision so we can compare the subrepo state with
1289 # commit revision so we can compare the subrepo state with
1225 # both. We used to store the working directory one.
1290 # both. We used to store the working directory one.
1226 output, err = self._svncommand(['info', '--xml'])
1291 output, err = self._svncommand(['info', '--xml'])
1227 doc = xml.dom.minidom.parseString(output)
1292 doc = xml.dom.minidom.parseString(output)
1228 entries = doc.getElementsByTagName('entry')
1293 entries = doc.getElementsByTagName('entry')
1229 lastrev, rev = '0', '0'
1294 lastrev, rev = '0', '0'
1230 if entries:
1295 if entries:
1231 rev = str(entries[0].getAttribute('revision')) or '0'
1296 rev = str(entries[0].getAttribute('revision')) or '0'
1232 commits = entries[0].getElementsByTagName('commit')
1297 commits = entries[0].getElementsByTagName('commit')
1233 if commits:
1298 if commits:
1234 lastrev = str(commits[0].getAttribute('revision')) or '0'
1299 lastrev = str(commits[0].getAttribute('revision')) or '0'
1235 return (lastrev, rev)
1300 return (lastrev, rev)
1236
1301
1237 def _wcrev(self):
1302 def _wcrev(self):
1238 return self._wcrevs()[0]
1303 return self._wcrevs()[0]
1239
1304
1240 def _wcchanged(self):
1305 def _wcchanged(self):
1241 """Return (changes, extchanges, missing) where changes is True
1306 """Return (changes, extchanges, missing) where changes is True
1242 if the working directory was changed, extchanges is
1307 if the working directory was changed, extchanges is
1243 True if any of these changes concern an external entry and missing
1308 True if any of these changes concern an external entry and missing
1244 is True if any change is a missing entry.
1309 is True if any change is a missing entry.
1245 """
1310 """
1246 output, err = self._svncommand(['status', '--xml'])
1311 output, err = self._svncommand(['status', '--xml'])
1247 externals, changes, missing = [], [], []
1312 externals, changes, missing = [], [], []
1248 doc = xml.dom.minidom.parseString(output)
1313 doc = xml.dom.minidom.parseString(output)
1249 for e in doc.getElementsByTagName('entry'):
1314 for e in doc.getElementsByTagName('entry'):
1250 s = e.getElementsByTagName('wc-status')
1315 s = e.getElementsByTagName('wc-status')
1251 if not s:
1316 if not s:
1252 continue
1317 continue
1253 item = s[0].getAttribute('item')
1318 item = s[0].getAttribute('item')
1254 props = s[0].getAttribute('props')
1319 props = s[0].getAttribute('props')
1255 path = e.getAttribute('path')
1320 path = e.getAttribute('path')
1256 if item == 'external':
1321 if item == 'external':
1257 externals.append(path)
1322 externals.append(path)
1258 elif item == 'missing':
1323 elif item == 'missing':
1259 missing.append(path)
1324 missing.append(path)
1260 if (item not in ('', 'normal', 'unversioned', 'external')
1325 if (item not in ('', 'normal', 'unversioned', 'external')
1261 or props not in ('', 'none', 'normal')):
1326 or props not in ('', 'none', 'normal')):
1262 changes.append(path)
1327 changes.append(path)
1263 for path in changes:
1328 for path in changes:
1264 for ext in externals:
1329 for ext in externals:
1265 if path == ext or path.startswith(ext + pycompat.ossep):
1330 if path == ext or path.startswith(ext + pycompat.ossep):
1266 return True, True, bool(missing)
1331 return True, True, bool(missing)
1267 return bool(changes), False, bool(missing)
1332 return bool(changes), False, bool(missing)
1268
1333
1269 def dirty(self, ignoreupdate=False, missing=False):
1334 def dirty(self, ignoreupdate=False, missing=False):
1270 wcchanged = self._wcchanged()
1335 wcchanged = self._wcchanged()
1271 changed = wcchanged[0] or (missing and wcchanged[2])
1336 changed = wcchanged[0] or (missing and wcchanged[2])
1272 if not changed:
1337 if not changed:
1273 if self._state[1] in self._wcrevs() or ignoreupdate:
1338 if self._state[1] in self._wcrevs() or ignoreupdate:
1274 return False
1339 return False
1275 return True
1340 return True
1276
1341
1277 def basestate(self):
1342 def basestate(self):
1278 lastrev, rev = self._wcrevs()
1343 lastrev, rev = self._wcrevs()
1279 if lastrev != rev:
1344 if lastrev != rev:
1280 # Last committed rev is not the same than rev. We would
1345 # Last committed rev is not the same than rev. We would
1281 # like to take lastrev but we do not know if the subrepo
1346 # like to take lastrev but we do not know if the subrepo
1282 # URL exists at lastrev. Test it and fallback to rev it
1347 # URL exists at lastrev. Test it and fallback to rev it
1283 # is not there.
1348 # is not there.
1284 try:
1349 try:
1285 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1350 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1286 return lastrev
1351 return lastrev
1287 except error.Abort:
1352 except error.Abort:
1288 pass
1353 pass
1289 return rev
1354 return rev
1290
1355
1291 @annotatesubrepoerror
1356 @annotatesubrepoerror
1292 def commit(self, text, user, date):
1357 def commit(self, text, user, date):
1293 # user and date are out of our hands since svn is centralized
1358 # user and date are out of our hands since svn is centralized
1294 changed, extchanged, missing = self._wcchanged()
1359 changed, extchanged, missing = self._wcchanged()
1295 if not changed:
1360 if not changed:
1296 return self.basestate()
1361 return self.basestate()
1297 if extchanged:
1362 if extchanged:
1298 # Do not try to commit externals
1363 # Do not try to commit externals
1299 raise error.Abort(_('cannot commit svn externals'))
1364 raise error.Abort(_('cannot commit svn externals'))
1300 if missing:
1365 if missing:
1301 # svn can commit with missing entries but aborting like hg
1366 # svn can commit with missing entries but aborting like hg
1302 # seems a better approach.
1367 # seems a better approach.
1303 raise error.Abort(_('cannot commit missing svn entries'))
1368 raise error.Abort(_('cannot commit missing svn entries'))
1304 commitinfo, err = self._svncommand(['commit', '-m', text])
1369 commitinfo, err = self._svncommand(['commit', '-m', text])
1305 self.ui.status(commitinfo)
1370 self.ui.status(commitinfo)
1306 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1371 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1307 if not newrev:
1372 if not newrev:
1308 if not commitinfo.strip():
1373 if not commitinfo.strip():
1309 # Sometimes, our definition of "changed" differs from
1374 # Sometimes, our definition of "changed" differs from
1310 # svn one. For instance, svn ignores missing files
1375 # svn one. For instance, svn ignores missing files
1311 # when committing. If there are only missing files, no
1376 # when committing. If there are only missing files, no
1312 # commit is made, no output and no error code.
1377 # commit is made, no output and no error code.
1313 raise error.Abort(_('failed to commit svn changes'))
1378 raise error.Abort(_('failed to commit svn changes'))
1314 raise error.Abort(commitinfo.splitlines()[-1])
1379 raise error.Abort(commitinfo.splitlines()[-1])
1315 newrev = newrev.groups()[0]
1380 newrev = newrev.groups()[0]
1316 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1381 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1317 return newrev
1382 return newrev
1318
1383
1319 @annotatesubrepoerror
1384 @annotatesubrepoerror
1320 def remove(self):
1385 def remove(self):
1321 if self.dirty():
1386 if self.dirty():
1322 self.ui.warn(_('not removing repo %s because '
1387 self.ui.warn(_('not removing repo %s because '
1323 'it has changes.\n') % self._path)
1388 'it has changes.\n') % self._path)
1324 return
1389 return
1325 self.ui.note(_('removing subrepo %s\n') % self._path)
1390 self.ui.note(_('removing subrepo %s\n') % self._path)
1326
1391
1327 self.wvfs.rmtree(forcibly=True)
1392 self.wvfs.rmtree(forcibly=True)
1328 try:
1393 try:
1329 pwvfs = self._ctx.repo().wvfs
1394 pwvfs = self._ctx.repo().wvfs
1330 pwvfs.removedirs(pwvfs.dirname(self._path))
1395 pwvfs.removedirs(pwvfs.dirname(self._path))
1331 except OSError:
1396 except OSError:
1332 pass
1397 pass
1333
1398
1334 @annotatesubrepoerror
1399 @annotatesubrepoerror
1335 def get(self, state, overwrite=False):
1400 def get(self, state, overwrite=False):
1336 if overwrite:
1401 if overwrite:
1337 self._svncommand(['revert', '--recursive'])
1402 self._svncommand(['revert', '--recursive'])
1338 args = ['checkout']
1403 args = ['checkout']
1339 if self._svnversion >= (1, 5):
1404 if self._svnversion >= (1, 5):
1340 args.append('--force')
1405 args.append('--force')
1341 # The revision must be specified at the end of the URL to properly
1406 # The revision must be specified at the end of the URL to properly
1342 # update to a directory which has since been deleted and recreated.
1407 # update to a directory which has since been deleted and recreated.
1343 args.append('%s@%s' % (state[0], state[1]))
1408 args.append('%s@%s' % (state[0], state[1]))
1344
1409
1345 # SEC: check that the ssh url is safe
1410 # SEC: check that the ssh url is safe
1346 util.checksafessh(state[0])
1411 util.checksafessh(state[0])
1347
1412
1348 status, err = self._svncommand(args, failok=True)
1413 status, err = self._svncommand(args, failok=True)
1349 _sanitize(self.ui, self.wvfs, '.svn')
1414 _sanitize(self.ui, self.wvfs, '.svn')
1350 if not re.search('Checked out revision [0-9]+.', status):
1415 if not re.search('Checked out revision [0-9]+.', status):
1351 if ('is already a working copy for a different URL' in err
1416 if ('is already a working copy for a different URL' in err
1352 and (self._wcchanged()[:2] == (False, False))):
1417 and (self._wcchanged()[:2] == (False, False))):
1353 # obstructed but clean working copy, so just blow it away.
1418 # obstructed but clean working copy, so just blow it away.
1354 self.remove()
1419 self.remove()
1355 self.get(state, overwrite=False)
1420 self.get(state, overwrite=False)
1356 return
1421 return
1357 raise error.Abort((status or err).splitlines()[-1])
1422 raise error.Abort((status or err).splitlines()[-1])
1358 self.ui.status(status)
1423 self.ui.status(status)
1359
1424
1360 @annotatesubrepoerror
1425 @annotatesubrepoerror
1361 def merge(self, state):
1426 def merge(self, state):
1362 old = self._state[1]
1427 old = self._state[1]
1363 new = state[1]
1428 new = state[1]
1364 wcrev = self._wcrev()
1429 wcrev = self._wcrev()
1365 if new != wcrev:
1430 if new != wcrev:
1366 dirty = old == wcrev or self._wcchanged()[0]
1431 dirty = old == wcrev or self._wcchanged()[0]
1367 if _updateprompt(self.ui, self, dirty, wcrev, new):
1432 if _updateprompt(self.ui, self, dirty, wcrev, new):
1368 self.get(state, False)
1433 self.get(state, False)
1369
1434
1370 def push(self, opts):
1435 def push(self, opts):
1371 # push is a no-op for SVN
1436 # push is a no-op for SVN
1372 return True
1437 return True
1373
1438
1374 @annotatesubrepoerror
1439 @annotatesubrepoerror
1375 def files(self):
1440 def files(self):
1376 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1441 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1377 doc = xml.dom.minidom.parseString(output)
1442 doc = xml.dom.minidom.parseString(output)
1378 paths = []
1443 paths = []
1379 for e in doc.getElementsByTagName('entry'):
1444 for e in doc.getElementsByTagName('entry'):
1380 kind = str(e.getAttribute('kind'))
1445 kind = str(e.getAttribute('kind'))
1381 if kind != 'file':
1446 if kind != 'file':
1382 continue
1447 continue
1383 name = ''.join(c.data for c
1448 name = ''.join(c.data for c
1384 in e.getElementsByTagName('name')[0].childNodes
1449 in e.getElementsByTagName('name')[0].childNodes
1385 if c.nodeType == c.TEXT_NODE)
1450 if c.nodeType == c.TEXT_NODE)
1386 paths.append(name.encode('utf-8'))
1451 paths.append(name.encode('utf-8'))
1387 return paths
1452 return paths
1388
1453
1389 def filedata(self, name, decode):
1454 def filedata(self, name, decode):
1390 return self._svncommand(['cat'], name)[0]
1455 return self._svncommand(['cat'], name)[0]
1391
1456
1392
1457
1393 class gitsubrepo(abstractsubrepo):
1458 class gitsubrepo(abstractsubrepo):
1394 def __init__(self, ctx, path, state, allowcreate):
1459 def __init__(self, ctx, path, state, allowcreate):
1395 super(gitsubrepo, self).__init__(ctx, path)
1460 super(gitsubrepo, self).__init__(ctx, path)
1396 self._state = state
1461 self._state = state
1397 self._abspath = ctx.repo().wjoin(path)
1462 self._abspath = ctx.repo().wjoin(path)
1398 self._subparent = ctx.repo()
1463 self._subparent = ctx.repo()
1399 self._ensuregit()
1464 self._ensuregit()
1400
1465
1401 def _ensuregit(self):
1466 def _ensuregit(self):
1402 try:
1467 try:
1403 self._gitexecutable = 'git'
1468 self._gitexecutable = 'git'
1404 out, err = self._gitnodir(['--version'])
1469 out, err = self._gitnodir(['--version'])
1405 except OSError as e:
1470 except OSError as e:
1406 genericerror = _("error executing git for subrepo '%s': %s")
1471 genericerror = _("error executing git for subrepo '%s': %s")
1407 notfoundhint = _("check git is installed and in your PATH")
1472 notfoundhint = _("check git is installed and in your PATH")
1408 if e.errno != errno.ENOENT:
1473 if e.errno != errno.ENOENT:
1409 raise error.Abort(genericerror % (
1474 raise error.Abort(genericerror % (
1410 self._path, encoding.strtolocal(e.strerror)))
1475 self._path, encoding.strtolocal(e.strerror)))
1411 elif pycompat.iswindows:
1476 elif pycompat.iswindows:
1412 try:
1477 try:
1413 self._gitexecutable = 'git.cmd'
1478 self._gitexecutable = 'git.cmd'
1414 out, err = self._gitnodir(['--version'])
1479 out, err = self._gitnodir(['--version'])
1415 except OSError as e2:
1480 except OSError as e2:
1416 if e2.errno == errno.ENOENT:
1481 if e2.errno == errno.ENOENT:
1417 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1482 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1418 " for subrepo '%s'") % self._path,
1483 " for subrepo '%s'") % self._path,
1419 hint=notfoundhint)
1484 hint=notfoundhint)
1420 else:
1485 else:
1421 raise error.Abort(genericerror % (self._path,
1486 raise error.Abort(genericerror % (self._path,
1422 encoding.strtolocal(e2.strerror)))
1487 encoding.strtolocal(e2.strerror)))
1423 else:
1488 else:
1424 raise error.Abort(_("couldn't find git for subrepo '%s'")
1489 raise error.Abort(_("couldn't find git for subrepo '%s'")
1425 % self._path, hint=notfoundhint)
1490 % self._path, hint=notfoundhint)
1426 versionstatus = self._checkversion(out)
1491 versionstatus = self._checkversion(out)
1427 if versionstatus == 'unknown':
1492 if versionstatus == 'unknown':
1428 self.ui.warn(_('cannot retrieve git version\n'))
1493 self.ui.warn(_('cannot retrieve git version\n'))
1429 elif versionstatus == 'abort':
1494 elif versionstatus == 'abort':
1430 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1495 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1431 elif versionstatus == 'warning':
1496 elif versionstatus == 'warning':
1432 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1497 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1433
1498
1434 @staticmethod
1499 @staticmethod
1435 def _gitversion(out):
1500 def _gitversion(out):
1436 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1501 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1437 if m:
1502 if m:
1438 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1503 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1439
1504
1440 m = re.search(br'^git version (\d+)\.(\d+)', out)
1505 m = re.search(br'^git version (\d+)\.(\d+)', out)
1441 if m:
1506 if m:
1442 return (int(m.group(1)), int(m.group(2)), 0)
1507 return (int(m.group(1)), int(m.group(2)), 0)
1443
1508
1444 return -1
1509 return -1
1445
1510
    @staticmethod
    def _checkversion(out):
        '''ensure git version is new enough

        >>> _checkversion = gitsubrepo._checkversion
        >>> _checkversion(b'git version 1.6.0')
        'ok'
        >>> _checkversion(b'git version 1.8.5')
        'ok'
        >>> _checkversion(b'git version 1.4.0')
        'abort'
        >>> _checkversion(b'git version 1.5.0')
        'warning'
        >>> _checkversion(b'git version 1.9-rc0')
        'ok'
        >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
        'ok'
        >>> _checkversion(b'git version 1.9.0.GIT')
        'ok'
        >>> _checkversion(b'git version 12345')
        'unknown'
        >>> _checkversion(b'no')
        'unknown'
        '''
        version = gitsubrepo._gitversion(out)
        # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
        # despite the docstring comment. For now, error on 1.4.0, warn on
        # 1.5.0 but attempt to continue.
        if version == -1:
            # _gitversion could not parse the output at all
            return 'unknown'
        if version < (1, 5, 0):
            return 'abort'
        elif version < (1, 6, 0):
            return 'warning'
        return 'ok'
1481
1546
1482 def _gitcommand(self, commands, env=None, stream=False):
1547 def _gitcommand(self, commands, env=None, stream=False):
1483 return self._gitdir(commands, env=env, stream=stream)[0]
1548 return self._gitdir(commands, env=env, stream=stream)[0]
1484
1549
1485 def _gitdir(self, commands, env=None, stream=False):
1550 def _gitdir(self, commands, env=None, stream=False):
1486 return self._gitnodir(commands, env=env, stream=stream,
1551 return self._gitnodir(commands, env=env, stream=stream,
1487 cwd=self._abspath)
1552 cwd=self._abspath)
1488
1553
    def _gitnodir(self, commands, env=None, stream=False, cwd=None):
        """Calls the git command

        The methods tries to call the git command. versions prior to 1.6.0
        are not supported and very probably fail.

        Returns a (stdout-data, returncode) pair.  When ``stream`` is true,
        the first element is the live stdout pipe and returncode is None.
        """
        self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
        if env is None:
            env = encoding.environ.copy()
        # disable localization for Git output (issue5176)
        env['LC_ALL'] = 'C'
        # fix for Git CVE-2015-7545
        if 'GIT_ALLOW_PROTOCOL' not in env:
            env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
        # unless ui.quiet is set, print git's stderr,
        # which is mostly progress and useful info
        errpipe = None
        if self.ui.quiet:
            errpipe = open(os.devnull, 'w')
        if self.ui._colormode and len(commands) and commands[0] == "diff":
            # insert the argument in the front,
            # the end of git diff arguments is used for paths
            commands.insert(1, '--color')
        p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
                             cwd=cwd, env=env, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=errpipe)
        if stream:
            # caller consumes stdout; no returncode available yet
            return p.stdout, None

        retdata = p.stdout.read().strip()
        # wait for the child to exit to avoid race condition.
        p.wait()

        if p.returncode != 0 and p.returncode != 1:
            # there are certain error codes that are ok
            command = commands[0]
            if command in ('cat-file', 'symbolic-ref'):
                return retdata, p.returncode
            # for all others, abort
            raise error.Abort(_('git %s error %d in %s') %
                              (command, p.returncode, self._relpath))

        return retdata, p.returncode
1532
1597
1533 def _gitmissing(self):
1598 def _gitmissing(self):
1534 return not self.wvfs.exists('.git')
1599 return not self.wvfs.exists('.git')
1535
1600
1536 def _gitstate(self):
1601 def _gitstate(self):
1537 return self._gitcommand(['rev-parse', 'HEAD'])
1602 return self._gitcommand(['rev-parse', 'HEAD'])
1538
1603
1539 def _gitcurrentbranch(self):
1604 def _gitcurrentbranch(self):
1540 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1605 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1541 if err:
1606 if err:
1542 current = None
1607 current = None
1543 return current
1608 return current
1544
1609
1545 def _gitremote(self, remote):
1610 def _gitremote(self, remote):
1546 out = self._gitcommand(['remote', 'show', '-n', remote])
1611 out = self._gitcommand(['remote', 'show', '-n', remote])
1547 line = out.split('\n')[1]
1612 line = out.split('\n')[1]
1548 i = line.index('URL: ') + len('URL: ')
1613 i = line.index('URL: ') + len('URL: ')
1549 return line[i:]
1614 return line[i:]
1550
1615
1551 def _githavelocally(self, revision):
1616 def _githavelocally(self, revision):
1552 out, code = self._gitdir(['cat-file', '-e', revision])
1617 out, code = self._gitdir(['cat-file', '-e', revision])
1553 return code == 0
1618 return code == 0
1554
1619
1555 def _gitisancestor(self, r1, r2):
1620 def _gitisancestor(self, r1, r2):
1556 base = self._gitcommand(['merge-base', r1, r2])
1621 base = self._gitcommand(['merge-base', r1, r2])
1557 return base == r1
1622 return base == r1
1558
1623
1559 def _gitisbare(self):
1624 def _gitisbare(self):
1560 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1625 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1561
1626
1562 def _gitupdatestat(self):
1627 def _gitupdatestat(self):
1563 """This must be run before git diff-index.
1628 """This must be run before git diff-index.
1564 diff-index only looks at changes to file stat;
1629 diff-index only looks at changes to file stat;
1565 this command looks at file contents and updates the stat."""
1630 this command looks at file contents and updates the stat."""
1566 self._gitcommand(['update-index', '-q', '--refresh'])
1631 self._gitcommand(['update-index', '-q', '--refresh'])
1567
1632
1568 def _gitbranchmap(self):
1633 def _gitbranchmap(self):
1569 '''returns 2 things:
1634 '''returns 2 things:
1570 a map from git branch to revision
1635 a map from git branch to revision
1571 a map from revision to branches'''
1636 a map from revision to branches'''
1572 branch2rev = {}
1637 branch2rev = {}
1573 rev2branch = {}
1638 rev2branch = {}
1574
1639
1575 out = self._gitcommand(['for-each-ref', '--format',
1640 out = self._gitcommand(['for-each-ref', '--format',
1576 '%(objectname) %(refname)'])
1641 '%(objectname) %(refname)'])
1577 for line in out.split('\n'):
1642 for line in out.split('\n'):
1578 revision, ref = line.split(' ')
1643 revision, ref = line.split(' ')
1579 if (not ref.startswith('refs/heads/') and
1644 if (not ref.startswith('refs/heads/') and
1580 not ref.startswith('refs/remotes/')):
1645 not ref.startswith('refs/remotes/')):
1581 continue
1646 continue
1582 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1647 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1583 continue # ignore remote/HEAD redirects
1648 continue # ignore remote/HEAD redirects
1584 branch2rev[ref] = revision
1649 branch2rev[ref] = revision
1585 rev2branch.setdefault(revision, []).append(ref)
1650 rev2branch.setdefault(revision, []).append(ref)
1586 return branch2rev, rev2branch
1651 return branch2rev, rev2branch
1587
1652
1588 def _gittracking(self, branches):
1653 def _gittracking(self, branches):
1589 'return map of remote branch to local tracking branch'
1654 'return map of remote branch to local tracking branch'
1590 # assumes no more than one local tracking branch for each remote
1655 # assumes no more than one local tracking branch for each remote
1591 tracking = {}
1656 tracking = {}
1592 for b in branches:
1657 for b in branches:
1593 if b.startswith('refs/remotes/'):
1658 if b.startswith('refs/remotes/'):
1594 continue
1659 continue
1595 bname = b.split('/', 2)[2]
1660 bname = b.split('/', 2)[2]
1596 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1661 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1597 if remote:
1662 if remote:
1598 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1663 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1599 tracking['refs/remotes/%s/%s' %
1664 tracking['refs/remotes/%s/%s' %
1600 (remote, ref.split('/', 2)[2])] = b
1665 (remote, ref.split('/', 2)[2])] = b
1601 return tracking
1666 return tracking
1602
1667
1603 def _abssource(self, source):
1668 def _abssource(self, source):
1604 if '://' not in source:
1669 if '://' not in source:
1605 # recognize the scp syntax as an absolute source
1670 # recognize the scp syntax as an absolute source
1606 colon = source.find(':')
1671 colon = source.find(':')
1607 if colon != -1 and '/' not in source[:colon]:
1672 if colon != -1 and '/' not in source[:colon]:
1608 return source
1673 return source
1609 self._subsource = source
1674 self._subsource = source
1610 return _abssource(self)
1675 return _abssource(self)
1611
1676
    def _fetch(self, source, revision):
        # Make sure ``revision`` is available locally, cloning the subrepo
        # from ``source`` first when there is no checkout yet.  Aborts if
        # the revision cannot be obtained even after fetching.
        if self._gitmissing():
            # SEC: check for safe ssh url
            util.checksafessh(source)

            source = self._abssource(source)
            self.ui.status(_('cloning subrepo %s from %s\n') %
                           (self._relpath, source))
            self._gitnodir(['clone', source, self._abspath])
        if self._githavelocally(revision):
            return
        self.ui.status(_('pulling subrepo %s from %s\n') %
                       (self._relpath, self._gitremote('origin')))
        # try only origin: the originally cloned repo
        self._gitcommand(['fetch'])
        if not self._githavelocally(revision):
            raise error.Abort(_('revision %s does not exist in subrepository '
                                '"%s"\n') % (revision, self._relpath))
1630
1695
    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        """Report whether the subrepo differs from its recorded state."""
        if self._gitmissing():
            # no checkout at all: dirty only if a revision is recorded
            return self._state[1] != ''
        if self._gitisbare():
            return True
        if not ignoreupdate and self._state[1] != self._gitstate():
            # different version checked out
            return True
        # check for staged changes or modified files; ignore untracked files
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
        # diff-index --quiet exits 1 when there are differences
        return code == 1
1644
1709
1645 def basestate(self):
1710 def basestate(self):
1646 return self._gitstate()
1711 return self._gitstate()
1647
1712
    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Update the working copy to the revision recorded in ``state``.

        Clones/fetches as needed, then checks out the revision, preferring
        an existing local branch over a detached HEAD.  With ``overwrite``
        set, local modifications are discarded first.
        """
        source, revision, kind = state
        if not revision:
            self.remove()
            return
        self._fetch(source, revision)
        # if the repo was set to be bare, unbare it
        if self._gitisbare():
            self._gitcommand(['config', 'core.bare', 'false'])
            if self._gitstate() == revision:
                self._gitcommand(['reset', '--hard', 'HEAD'])
                return
        elif self._gitstate() == revision:
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # reset --hard will otherwise throw away files added for commit,
                # not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
            self._gitcommand(['reset', '--hard', 'HEAD'])
            return
        branch2rev, rev2branch = self._gitbranchmap()

        def checkout(args):
            # run git checkout, honoring ``overwrite``
            cmd = ['checkout']
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # the -f option will otherwise throw away files added for
                # commit, not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
                cmd.append('-f')
            self._gitcommand(cmd + args)
            _sanitize(self.ui, self.wvfs, '.git')

        def rawcheckout():
            # no branch to checkout, check it out with no branch
            self.ui.warn(_('checking out detached HEAD in '
                           'subrepository "%s"\n') % self._relpath)
            self.ui.warn(_('check out a git branch if you intend '
                           'to make changes\n'))
            checkout(['-q', revision])

        if revision not in rev2branch:
            rawcheckout()
            return
        branches = rev2branch[revision]
        firstlocalbranch = None
        for b in branches:
            if b == 'refs/heads/master':
                # master trumps all other branches
                checkout(['refs/heads/master'])
                return
            if not firstlocalbranch and not b.startswith('refs/remotes/'):
                firstlocalbranch = b
        if firstlocalbranch:
            checkout([firstlocalbranch])
            return

        tracking = self._gittracking(branch2rev.keys())
        # choose a remote branch already tracked if possible
        remote = branches[0]
        if remote not in tracking:
            for b in branches:
                if b in tracking:
                    remote = b
                    break

        if remote not in tracking:
            # create a new local tracking branch
            local = remote.split('/', 3)[3]
            checkout(['-b', local, remote])
        elif self._gitisancestor(branch2rev[tracking[remote]], remote):
            # When updating to a tracked remote branch,
            # if the local tracking branch is downstream of it,
            # a normal `git pull` would have performed a "fast-forward merge"
            # which is equivalent to updating the local branch to the remote.
            # Since we are only looking at branching at update, we need to
            # detect this situation and perform this action lazily.
            if tracking[remote] != self._gitcurrentbranch():
                checkout([tracking[remote]])
            self._gitcommand(['merge', '--ff', remote])
            _sanitize(self.ui, self.wvfs, '.git')
        else:
            # a real merge would be required, just checkout the revision
            rawcheckout()
1733
1798
1734 @annotatesubrepoerror
1799 @annotatesubrepoerror
1735 def commit(self, text, user, date):
1800 def commit(self, text, user, date):
1736 if self._gitmissing():
1801 if self._gitmissing():
1737 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1802 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1738 cmd = ['commit', '-a', '-m', text]
1803 cmd = ['commit', '-a', '-m', text]
1739 env = encoding.environ.copy()
1804 env = encoding.environ.copy()
1740 if user:
1805 if user:
1741 cmd += ['--author', user]
1806 cmd += ['--author', user]
1742 if date:
1807 if date:
1743 # git's date parser silently ignores when seconds < 1e9
1808 # git's date parser silently ignores when seconds < 1e9
1744 # convert to ISO8601
1809 # convert to ISO8601
1745 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1810 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1746 '%Y-%m-%dT%H:%M:%S %1%2')
1811 '%Y-%m-%dT%H:%M:%S %1%2')
1747 self._gitcommand(cmd, env=env)
1812 self._gitcommand(cmd, env=env)
1748 # make sure commit works otherwise HEAD might not exist under certain
1813 # make sure commit works otherwise HEAD might not exist under certain
1749 # circumstances
1814 # circumstances
1750 return self._gitstate()
1815 return self._gitstate()
1751
1816
    @annotatesubrepoerror
    def merge(self, state):
        """Merge the revision recorded in ``state`` into the working copy.

        Prompts (via _updateprompt) when the working copy is dirty and the
        target differs from the current checkout.
        """
        source, revision, kind = state
        self._fetch(source, revision)
        base = self._gitcommand(['merge-base', revision, self._state[1]])
        # refresh cached stat info before diff-index
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])

        def mergefunc():
            if base == revision:
                self.get(state) # fast forward merge
            elif base != self._state[1]:
                self._gitcommand(['merge', '--no-commit', revision])
                _sanitize(self.ui, self.wvfs, '.git')

        if self.dirty():
            if self._gitstate() != revision:
                # code != 0 means diff-index saw uncommitted changes
                dirty = self._gitstate() == self._state[1] or code != 0
                if _updateprompt(self.ui, self, dirty,
                                 self._state[1][:7], revision[:7]):
                    mergefunc()
        else:
            mergefunc()
1775
1840
    @annotatesubrepoerror
    def push(self, opts):
        """Push the recorded revision to origin when not already there.

        Returns True when nothing needed pushing or the push succeeded,
        False otherwise.
        """
        force = opts.get('force')

        if not self._state[1]:
            # nothing recorded to push
            return True
        if self._gitmissing():
            raise error.Abort(_("subrepo %s is missing") % self._relpath)
        # if a branch in origin contains the revision, nothing to do
        branch2rev, rev2branch = self._gitbranchmap()
        if self._state[1] in rev2branch:
            for b in rev2branch[self._state[1]]:
                if b.startswith('refs/remotes/origin/'):
                    return True
        for b, revision in branch2rev.iteritems():
            if b.startswith('refs/remotes/origin/'):
                if self._gitisancestor(self._state[1], revision):
                    return True
        # otherwise, try to push the currently checked out branch
        cmd = ['push']
        if force:
            cmd.append('--force')

        current = self._gitcurrentbranch()
        if current:
            # determine if the current branch is even useful
            if not self._gitisancestor(self._state[1], current):
                self.ui.warn(_('unrelated git branch checked out '
                               'in subrepository "%s"\n') % self._relpath)
                return False
            self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
                           (current.split('/', 2)[2], self._relpath))
            ret = self._gitdir(cmd + ['origin', current])
            return ret[1] == 0
        else:
            self.ui.warn(_('no branch checked out in subrepository "%s"\n'
                           'cannot push revision %s\n') %
                         (self._relpath, self._state[1]))
            return False
1815
1880
    @annotatesubrepoerror
    def add(self, ui, match, prefix, explicitonly, **opts):
        """Add matched files to git's index; return the rejected ones.

        Already-tracked files are rejected only when matched exactly;
        exact matches are force-added even if git would ignore them.
        """
        if self._gitmissing():
            return []

        (modified, added, removed,
         deleted, unknown, ignored, clean) = self.status(None, unknown=True,
                                                         clean=True)

        tracked = set()
        # dirstates 'amn' warn, 'r' is added again
        for l in (modified, added, deleted, clean):
            tracked.update(l)

        # Unknown files not of interest will be rejected by the matcher
        files = unknown
        files.extend(match.files())

        rejected = []

        files = [f for f in sorted(set(files)) if match(f)]
        for f in files:
            exact = match.exact(f)
            command = ["add"]
            if exact:
                command.append("-f") #should be added, even if ignored
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

            if f in tracked: # hg prints 'adding' even if already tracked
                if exact:
                    rejected.append(f)
                continue
            if not opts.get(r'dry_run'):
                self._gitcommand(command + [f])

        for f in rejected:
            ui.warn(_("%s already tracked!\n") % match.abs(f))

        return rejected
1856
1921
1857 @annotatesubrepoerror
1922 @annotatesubrepoerror
1858 def remove(self):
1923 def remove(self):
1859 if self._gitmissing():
1924 if self._gitmissing():
1860 return
1925 return
1861 if self.dirty():
1926 if self.dirty():
1862 self.ui.warn(_('not removing repo %s because '
1927 self.ui.warn(_('not removing repo %s because '
1863 'it has changes.\n') % self._relpath)
1928 'it has changes.\n') % self._relpath)
1864 return
1929 return
1865 # we can't fully delete the repository as it may contain
1930 # we can't fully delete the repository as it may contain
1866 # local-only history
1931 # local-only history
1867 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1932 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1868 self._gitcommand(['config', 'core.bare', 'true'])
1933 self._gitcommand(['config', 'core.bare', 'true'])
1869 for f, kind in self.wvfs.readdir():
1934 for f, kind in self.wvfs.readdir():
1870 if f == '.git':
1935 if f == '.git':
1871 continue
1936 continue
1872 if kind == stat.S_IFDIR:
1937 if kind == stat.S_IFDIR:
1873 self.wvfs.rmtree(f)
1938 self.wvfs.rmtree(f)
1874 else:
1939 else:
1875 self.wvfs.unlink(f)
1940 self.wvfs.unlink(f)
1876
1941
    def archive(self, archiver, prefix, match=None, decode=True):
        """Add the subrepo's files at the recorded revision to ``archiver``.

        Returns the number of files archived.  ``decode`` is accepted for
        interface compatibility but not used in this implementation.
        """
        total = 0
        source, revision = self._state
        if not revision:
            return total
        self._fetch(source, revision)

        # Parse git's native archive command.
        # This should be much faster than manually traversing the trees
        # and objects with many subprocess calls.
        tarstream = self._gitcommand(['archive', revision], stream=True)
        tar = tarfile.open(fileobj=tarstream, mode='r|')
        relpath = subrelpath(self)
        self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
        for i, info in enumerate(tar):
            if info.isdir():
                continue
            if match and not match(info.name):
                continue
            if info.issym():
                # archive the link target, not file data
                data = info.linkname
            else:
                data = tar.extractfile(info).read()
            archiver.addfile(prefix + self._path + '/' + info.name,
                             info.mode, info.issym(), data)
            total += 1
            self.ui.progress(_('archiving (%s)') % relpath, i + 1,
                             unit=_('files'))
        self.ui.progress(_('archiving (%s)') % relpath, None)
        return total
1907
1972
1908
1973
1909 @annotatesubrepoerror
1974 @annotatesubrepoerror
1910 def cat(self, match, fm, fntemplate, prefix, **opts):
1975 def cat(self, match, fm, fntemplate, prefix, **opts):
1911 rev = self._state[1]
1976 rev = self._state[1]
1912 if match.anypats():
1977 if match.anypats():
1913 return 1 #No support for include/exclude yet
1978 return 1 #No support for include/exclude yet
1914
1979
1915 if not match.files():
1980 if not match.files():
1916 return 1
1981 return 1
1917
1982
1918 # TODO: add support for non-plain formatter (see cmdutil.cat())
1983 # TODO: add support for non-plain formatter (see cmdutil.cat())
1919 for f in match.files():
1984 for f in match.files():
1920 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1985 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1921 fp = cmdutil.makefileobj(self._subparent, fntemplate,
1986 fp = cmdutil.makefileobj(self._subparent, fntemplate,
1922 self._ctx.node(),
1987 self._ctx.node(),
1923 pathname=self.wvfs.reljoin(prefix, f))
1988 pathname=self.wvfs.reljoin(prefix, f))
1924 fp.write(output)
1989 fp.write(output)
1925 fp.close()
1990 fp.close()
1926 return 0
1991 return 0
1927
1992
1928
1993
1929 @annotatesubrepoerror
1994 @annotatesubrepoerror
1930 def status(self, rev2, **opts):
1995 def status(self, rev2, **opts):
1931 rev1 = self._state[1]
1996 rev1 = self._state[1]
1932 if self._gitmissing() or not rev1:
1997 if self._gitmissing() or not rev1:
1933 # if the repo is missing, return no results
1998 # if the repo is missing, return no results
1934 return scmutil.status([], [], [], [], [], [], [])
1999 return scmutil.status([], [], [], [], [], [], [])
1935 modified, added, removed = [], [], []
2000 modified, added, removed = [], [], []
1936 self._gitupdatestat()
2001 self._gitupdatestat()
1937 if rev2:
2002 if rev2:
1938 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
2003 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1939 else:
2004 else:
1940 command = ['diff-index', '--no-renames', rev1]
2005 command = ['diff-index', '--no-renames', rev1]
1941 out = self._gitcommand(command)
2006 out = self._gitcommand(command)
1942 for line in out.split('\n'):
2007 for line in out.split('\n'):
1943 tab = line.find('\t')
2008 tab = line.find('\t')
1944 if tab == -1:
2009 if tab == -1:
1945 continue
2010 continue
1946 status, f = line[tab - 1], line[tab + 1:]
2011 status, f = line[tab - 1], line[tab + 1:]
1947 if status == 'M':
2012 if status == 'M':
1948 modified.append(f)
2013 modified.append(f)
1949 elif status == 'A':
2014 elif status == 'A':
1950 added.append(f)
2015 added.append(f)
1951 elif status == 'D':
2016 elif status == 'D':
1952 removed.append(f)
2017 removed.append(f)
1953
2018
1954 deleted, unknown, ignored, clean = [], [], [], []
2019 deleted, unknown, ignored, clean = [], [], [], []
1955
2020
1956 command = ['status', '--porcelain', '-z']
2021 command = ['status', '--porcelain', '-z']
1957 if opts.get(r'unknown'):
2022 if opts.get(r'unknown'):
1958 command += ['--untracked-files=all']
2023 command += ['--untracked-files=all']
1959 if opts.get(r'ignored'):
2024 if opts.get(r'ignored'):
1960 command += ['--ignored']
2025 command += ['--ignored']
1961 out = self._gitcommand(command)
2026 out = self._gitcommand(command)
1962
2027
1963 changedfiles = set()
2028 changedfiles = set()
1964 changedfiles.update(modified)
2029 changedfiles.update(modified)
1965 changedfiles.update(added)
2030 changedfiles.update(added)
1966 changedfiles.update(removed)
2031 changedfiles.update(removed)
1967 for line in out.split('\0'):
2032 for line in out.split('\0'):
1968 if not line:
2033 if not line:
1969 continue
2034 continue
1970 st = line[0:2]
2035 st = line[0:2]
1971 #moves and copies show 2 files on one line
2036 #moves and copies show 2 files on one line
1972 if line.find('\0') >= 0:
2037 if line.find('\0') >= 0:
1973 filename1, filename2 = line[3:].split('\0')
2038 filename1, filename2 = line[3:].split('\0')
1974 else:
2039 else:
1975 filename1 = line[3:]
2040 filename1 = line[3:]
1976 filename2 = None
2041 filename2 = None
1977
2042
1978 changedfiles.add(filename1)
2043 changedfiles.add(filename1)
1979 if filename2:
2044 if filename2:
1980 changedfiles.add(filename2)
2045 changedfiles.add(filename2)
1981
2046
1982 if st == '??':
2047 if st == '??':
1983 unknown.append(filename1)
2048 unknown.append(filename1)
1984 elif st == '!!':
2049 elif st == '!!':
1985 ignored.append(filename1)
2050 ignored.append(filename1)
1986
2051
1987 if opts.get(r'clean'):
2052 if opts.get(r'clean'):
1988 out = self._gitcommand(['ls-files'])
2053 out = self._gitcommand(['ls-files'])
1989 for f in out.split('\n'):
2054 for f in out.split('\n'):
1990 if not f in changedfiles:
2055 if not f in changedfiles:
1991 clean.append(f)
2056 clean.append(f)
1992
2057
1993 return scmutil.status(modified, added, removed, deleted,
2058 return scmutil.status(modified, added, removed, deleted,
1994 unknown, ignored, clean)
2059 unknown, ignored, clean)
1995
2060
1996 @annotatesubrepoerror
2061 @annotatesubrepoerror
1997 def diff(self, ui, diffopts, node2, match, prefix, **opts):
2062 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1998 node1 = self._state[1]
2063 node1 = self._state[1]
1999 cmd = ['diff', '--no-renames']
2064 cmd = ['diff', '--no-renames']
2000 if opts[r'stat']:
2065 if opts[r'stat']:
2001 cmd.append('--stat')
2066 cmd.append('--stat')
2002 else:
2067 else:
2003 # for Git, this also implies '-p'
2068 # for Git, this also implies '-p'
2004 cmd.append('-U%d' % diffopts.context)
2069 cmd.append('-U%d' % diffopts.context)
2005
2070
2006 gitprefix = self.wvfs.reljoin(prefix, self._path)
2071 gitprefix = self.wvfs.reljoin(prefix, self._path)
2007
2072
2008 if diffopts.noprefix:
2073 if diffopts.noprefix:
2009 cmd.extend(['--src-prefix=%s/' % gitprefix,
2074 cmd.extend(['--src-prefix=%s/' % gitprefix,
2010 '--dst-prefix=%s/' % gitprefix])
2075 '--dst-prefix=%s/' % gitprefix])
2011 else:
2076 else:
2012 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
2077 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
2013 '--dst-prefix=b/%s/' % gitprefix])
2078 '--dst-prefix=b/%s/' % gitprefix])
2014
2079
2015 if diffopts.ignorews:
2080 if diffopts.ignorews:
2016 cmd.append('--ignore-all-space')
2081 cmd.append('--ignore-all-space')
2017 if diffopts.ignorewsamount:
2082 if diffopts.ignorewsamount:
2018 cmd.append('--ignore-space-change')
2083 cmd.append('--ignore-space-change')
2019 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
2084 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
2020 and diffopts.ignoreblanklines:
2085 and diffopts.ignoreblanklines:
2021 cmd.append('--ignore-blank-lines')
2086 cmd.append('--ignore-blank-lines')
2022
2087
2023 cmd.append(node1)
2088 cmd.append(node1)
2024 if node2:
2089 if node2:
2025 cmd.append(node2)
2090 cmd.append(node2)
2026
2091
2027 output = ""
2092 output = ""
2028 if match.always():
2093 if match.always():
2029 output += self._gitcommand(cmd) + '\n'
2094 output += self._gitcommand(cmd) + '\n'
2030 else:
2095 else:
2031 st = self.status(node2)[:3]
2096 st = self.status(node2)[:3]
2032 files = [f for sublist in st for f in sublist]
2097 files = [f for sublist in st for f in sublist]
2033 for f in files:
2098 for f in files:
2034 if match(f):
2099 if match(f):
2035 output += self._gitcommand(cmd + ['--', f]) + '\n'
2100 output += self._gitcommand(cmd + ['--', f]) + '\n'
2036
2101
2037 if output.strip():
2102 if output.strip():
2038 ui.write(output)
2103 ui.write(output)
2039
2104
2040 @annotatesubrepoerror
2105 @annotatesubrepoerror
2041 def revert(self, substate, *pats, **opts):
2106 def revert(self, substate, *pats, **opts):
2042 self.ui.status(_('reverting subrepo %s\n') % substate[0])
2107 self.ui.status(_('reverting subrepo %s\n') % substate[0])
2043 if not opts.get(r'no_backup'):
2108 if not opts.get(r'no_backup'):
2044 status = self.status(None)
2109 status = self.status(None)
2045 names = status.modified
2110 names = status.modified
2046 for name in names:
2111 for name in names:
2047 bakname = scmutil.origpath(self.ui, self._subparent, name)
2112 bakname = scmutil.origpath(self.ui, self._subparent, name)
2048 self.ui.note(_('saving current version of %s as %s\n') %
2113 self.ui.note(_('saving current version of %s as %s\n') %
2049 (name, bakname))
2114 (name, bakname))
2050 self.wvfs.rename(name, bakname)
2115 self.wvfs.rename(name, bakname)
2051
2116
2052 if not opts.get(r'dry_run'):
2117 if not opts.get(r'dry_run'):
2053 self.get(substate, overwrite=True)
2118 self.get(substate, overwrite=True)
2054 return []
2119 return []
2055
2120
2056 def shortid(self, revid):
2121 def shortid(self, revid):
2057 return revid[:7]
2122 return revid[:7]
2058
2123
# Dispatch table mapping the subrepo kind recorded in .hgsub ('hg', 'svn'
# or 'git') to the subrepo class that implements it.
types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
}
General Comments 0
You need to be logged in to leave comments. Login now