##// END OF EJS Templates
localrepo: introduce shared method to check if a repository is shared...
Angel Ezquerra -
r23666:965788d9 default
parent child Browse files
Show More
@@ -1,125 +1,125 b''
1 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
1 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''share a common history between several working directories'''
6 '''share a common history between several working directories'''
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial import cmdutil, hg, util, extensions, bookmarks
9 from mercurial import cmdutil, hg, util, extensions, bookmarks
10 from mercurial.hg import repository, parseurl
10 from mercurial.hg import repository, parseurl
11 import errno
11 import errno
12
12
13 cmdtable = {}
13 cmdtable = {}
14 command = cmdutil.command(cmdtable)
14 command = cmdutil.command(cmdtable)
15 testedwith = 'internal'
15 testedwith = 'internal'
16
16
@command('share',
    [('U', 'noupdate', None, _('do not create a working copy')),
     ('B', 'bookmarks', None, _('also share bookmarks'))],
    _('[-U] [-B] SOURCE [DEST]'),
    norepo=True)
def share(ui, source, dest=None, noupdate=False, bookmarks=False):
    """create a new shared repository

    Initialize a new repository and working directory that shares its
    history (and optionally bookmarks) with another repository.

    .. note::

       using rollback or extensions that destroy/modify history (mq,
       rebase, etc.) can cause considerable confusion with shared
       clones. In particular, if two shared clones are both updated to
       the same changeset, and one of them destroys that changeset
       with rollback, the other clone will suddenly stop working: all
       operations will fail with "abort: working directory has unknown
       parent". The only known workaround is to use debugsetparents on
       the broken clone to reset it to a changeset that still exists.
    """

    # All of the work is done by hg.share(); note that the third positional
    # argument is 'update', i.e. the inverse of the --noupdate flag.
    return hg.share(ui, source, dest, not noupdate, bookmarks)
41
41
@command('unshare', [], '')
def unshare(ui, repo):
    """convert a shared repository to a normal one

    Copy the store data to the repo and remove the sharedpath data.
    """

    if not repo.shared():
        raise util.Abort(_("this is not a shared repo"))

    destlock = lock = None
    lock = repo.lock()
    try:
        # we use locks here because if we race with commit, we
        # can end up with extra data in the cloned revlogs that's
        # not pointed to by changesets, thus causing verify to
        # fail

        # copy the store from the share source into this repository's own
        # .hg; returns a lock on the destination store (may be None)
        destlock = hg.copystore(ui, repo, repo.path)

        # keep a backup of the sharedpath file rather than deleting it, so
        # a botched unshare can be undone by hand
        sharefile = repo.join('sharedpath')
        util.rename(sharefile, sharefile + '.old')

        repo.requirements.discard('sharedpath')
        repo._writerequirements()
    finally:
        # either lock may be None if acquisition failed part-way
        destlock and destlock.release()
        lock and lock.release()

    # update store, spath, sopener and sjoin of repo by re-running
    # __init__ on the (unfiltered) repo object in place
    repo.unfiltered().__init__(repo.baseui, repo.root)
73
73
def extsetup(ui):
    """Hook the share extension into the bookmark store.

    Each listed bmstore method is wrapped so that bookmark reads and
    writes can be redirected to the share source repository.
    """
    wrappers = [('getbkfile', getbkfile),
                ('recordchange', recordchange),
                ('write', write)]
    for methodname, wrapper in wrappers:
        extensions.wrapfunction(bookmarks.bmstore, methodname, wrapper)
78
78
def _hassharedbookmarks(repo):
    """Returns whether this repo has shared bookmarks"""
    try:
        # the mere presence of this marker file flags shared bookmarks;
        # its contents are irrelevant
        repo.vfs.read('bookmarks.shared')
        return True
    except IOError, inst:
        # a missing marker just means "not shared"; any other I/O error
        # is a real problem and must propagate
        if inst.errno != errno.ENOENT:
            raise
        return False
88
88
89 def _getsrcrepo(repo):
89 def _getsrcrepo(repo):
90 """
90 """
91 Returns the source repository object for a given shared repository.
91 Returns the source repository object for a given shared repository.
92 If repo is not a shared repository, return None.
92 If repo is not a shared repository, return None.
93 """
93 """
94 if repo.sharedpath == repo.path:
94 if repo.sharedpath == repo.path:
95 return None
95 return None
96
96
97 # the sharedpath always ends in the .hg; we want the path to the repo
97 # the sharedpath always ends in the .hg; we want the path to the repo
98 source = repo.vfs.split(repo.sharedpath)[0]
98 source = repo.vfs.split(repo.sharedpath)[0]
99 srcurl, branches = parseurl(source)
99 srcurl, branches = parseurl(source)
100 return repository(repo.ui, srcurl)
100 return repository(repo.ui, srcurl)
101
101
def getbkfile(orig, self, repo):
    """Wrapped bmstore.getbkfile: read bookmarks from the share source.

    When this repository shares its bookmarks and a source repository can
    be resolved, the read is redirected there; otherwise the local
    bookmarks file is used as usual.
    """
    bkrepo = repo
    if _hassharedbookmarks(repo):
        source = _getsrcrepo(repo)
        if source is not None:
            bkrepo = source
    return orig(self, bkrepo)
108
108
def recordchange(orig, self, tr):
    """Wrapped bmstore.recordchange: mirror bookmark changes to the source.

    The local bookmarks file is always written first; when bookmarks are
    shared, a post-close hook on the transaction propagates the change to
    the share source repository.
    """
    # Continue with write to local bookmarks file as usual
    orig(self, tr)

    if not _hassharedbookmarks(self._repo):
        return
    source = _getsrcrepo(self._repo)
    if source is not None:
        # defer the source-side write until the transaction closes cleanly
        tr.addpostclose('share-bookmarks',
                        lambda tr: self._writerepo(source))
118
118
def write(orig, self):
    """Wrapped bmstore.write: also write shared bookmarks to the source.

    The local file is written first so bookmarks survive a later unshare;
    the share source is then updated when bookmark sharing is active.
    """
    # First write local bookmarks file in case we ever unshare
    orig(self)
    if not _hassharedbookmarks(self._repo):
        return
    source = _getsrcrepo(self._repo)
    if source is not None:
        self._writerepo(source)
@@ -1,1833 +1,1839 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import hex, nullid, short
7 from node import hex, nullid, short
8 from i18n import _
8 from i18n import _
9 import urllib
9 import urllib
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 import lock as lockmod
12 import lock as lockmod
13 import transaction, store, encoding, exchange, bundle2
13 import transaction, store, encoding, exchange, bundle2
14 import scmutil, util, extensions, hook, error, revset
14 import scmutil, util, extensions, hook, error, revset
15 import match as matchmod
15 import match as matchmod
16 import merge as mergemod
16 import merge as mergemod
17 import tags as tagsmod
17 import tags as tagsmod
18 from lock import release
18 from lock import release
19 import weakref, errno, os, time, inspect
19 import weakref, errno, os, time, inspect
20 import branchmap, pathutil
20 import branchmap, pathutil
21 import namespaces
21 import namespaces
# Short module-local aliases for the caching decorators used below.
propertycache = util.propertycache
filecache = scmutil.filecache
24
24
class repofilecache(filecache):
    """All filecache usage on repo are done for logic that should be unfiltered
    """

    # Every descriptor operation is redirected to the unfiltered repository,
    # so all filtered views share a single cached value.
    def __get__(self, repo, type=None):
        return super(repofilecache, self).__get__(repo.unfiltered(), type)
    def __set__(self, repo, value):
        return super(repofilecache, self).__set__(repo.unfiltered(), value)
    def __delete__(self, repo):
        return super(repofilecache, self).__delete__(repo.unfiltered())
35
35
class storecache(repofilecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        # resolve fname relative to the store directory instead of .hg/
        return obj.sjoin(fname)
40
40
class unfilteredpropertycache(propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            # already unfiltered: compute (and cache) on this object
            return super(unfilteredpropertycache, self).__get__(unfi)
        # filtered view: fetch the attribute through the unfiltered repo,
        # triggering computation/caching there if needed
        return getattr(unfi, self.name)
49
49
class filteredpropertycache(propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        # store the computed value on the (possibly filtered) object itself,
        # bypassing any __setattr__ override
        object.__setattr__(obj, self.name, value)
55
55
56
56
def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    # A cached property lives directly in the instance dictionary of the
    # unfiltered repository once it has been computed.
    return name in repo.unfiltered().__dict__
60
60
def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""
    def inner(repo, *posargs, **kwargs):
        # substitute the unfiltered repository for whichever view the
        # caller invoked the method on
        return orig(repo.unfiltered(), *posargs, **kwargs)
    return inner
66
66
# Wire capabilities advertised by a local peer; legacy peers additionally
# support the old 'changegroupsubset' command.
moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
                  'unbundle'))
legacycaps = moderncaps.union(set(['changegroupsubset']))
70
70
class localpeer(peer.peerrepository):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=moderncaps):
        peer.peerrepository.__init__(self)
        # expose only the 'served' view of the repository to peers
        self._repo = repo.filtered('served')
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)
        self.requirements = repo.requirements
        self.supportedformats = repo.supportedformats

    def close(self):
        self._repo.close()

    def _capabilities(self):
        return self._caps

    # Most methods below simply delegate to the wrapped (filtered) repo.

    def local(self):
        return self._repo

    def canpush(self):
        return True

    def url(self):
        return self._repo.url()

    def lookup(self, key):
        return self._repo.lookup(key)

    def branchmap(self):
        return self._repo.branchmap()

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def getbundle(self, source, heads=None, common=None, bundlecaps=None,
                  format='HG10', **kwargs):
        cg = exchange.getbundle(self._repo, source, heads=heads,
                                common=common, bundlecaps=bundlecaps, **kwargs)
        if bundlecaps is not None and 'HG2Y' in bundlecaps:
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            cg = bundle2.unbundle20(self.ui, cg)
        return cg

    # TODO We might want to move the next two calls into legacypeer and add
    # unbundle instead.

    def unbundle(self, cg, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            cg = exchange.readbundle(self.ui, cg, None)
            ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
            if util.safehasattr(ret, 'getchunks'):
                # This is a bundle20 object, turn it into an unbundler.
                # This little dance should be dropped eventually when the API
                # is finally improved.
                stream = util.chunkbuffer(ret.getchunks())
                ret = bundle2.unbundle20(self.ui, stream)
            return ret
        except error.PushRaced, exc:
            # translate a push race into the error a remote peer would report
            raise error.ResponseError(_('push failed:'), str(exc))

    def lock(self):
        return self._repo.lock()

    def addchangegroup(self, cg, source, url):
        return changegroup.addchangegroup(self._repo, cg, source, url)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        '''used to test argument passing over the wire'''
        return "%s %s %s %s %s" % (one, two, three, four, five)
155
155
class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        # advertise the legacy capability set (adds 'changegroupsubset')
        localpeer.__init__(self, repo, caps=legacycaps)

    # Legacy wire methods, all delegating to the wrapped repository.

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def between(self, pairs):
        return self._repo.between(pairs)

    def changegroup(self, basenodes, source):
        return changegroup.changegroup(self._repo, basenodes, source)

    def changegroupsubset(self, bases, heads, source):
        return changegroup.changegroupsubset(self._repo, bases, heads, source)
174
174
class localrepository(object):

    # repository format requirements this class can read/write
    supportedformats = set(('revlogv1', 'generaldelta'))
    # everything supported: formats plus store-layout requirements
    _basesupported = supportedformats | set(('store', 'fncache', 'shared',
                                             'dotencode'))
    # requirements that are passed through as store-opener options
    openerreqs = set(('revlogv1', 'generaldelta'))
    # default requirements for a newly created repository
    requirements = ['revlogv1']
    # name of the active repoview filter; None means unfiltered
    filtername = None

    # a list of (ui, featureset) functions.
    # only functions defined in module of enabled extensions are invoked
    featuresetupfuncs = set()
187
187
    def _baserequirements(self, create):
        # return a copy so callers may append without mutating the
        # class-level default list
        return self.requirements[:]
190
190
    def __init__(self, baseui, path=None, create=False):
        # working-directory vfs rooted at the (expanded, real) path
        self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
        self.wopener = self.wvfs
        self.root = self.wvfs.base
        self.path = self.wvfs.join(".hg")
        self.origroot = path
        self.auditor = pathutil.pathauditor(self.root, self._checknested)
        # vfs rooted at .hg/
        self.vfs = scmutil.vfs(self.path)
        self.opener = self.vfs
        self.baseui = baseui
        self.ui = baseui.copy()
        self.ui.copy = baseui.copy # prevent copying repo configuration
        # A list of callback to shape the phase if no data were found.
        # Callback are in the form: func(repo, roots) --> processed root.
        # This list it to be filled by extension during repo setup
        self._phasedefaults = []
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # a missing/unreadable hgrc is not fatal
            pass

        if self.featuresetupfuncs:
            self.supported = set(self._basesupported) # use private copy
            extmods = set(m.__name__ for n, m
                          in extensions.extensions(self.ui))
            for setupfunc in self.featuresetupfuncs:
                # only run setup functions owned by an enabled extension
                if setupfunc.__module__ in extmods:
                    setupfunc(self.ui, self.supported)
        else:
            self.supported = self._basesupported

        if not self.vfs.isdir():
            if create:
                # initialize a brand new repository layout
                if not self.wvfs.exists():
                    self.wvfs.makedirs()
                self.vfs.makedir(notindexed=True)
                requirements = self._baserequirements(create)
                if self.ui.configbool('format', 'usestore', True):
                    self.vfs.mkdir("store")
                    requirements.append("store")
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                    # create an invalid changelog
                    self.vfs.append(
                        "00changelog.i",
                        '\0\0\0\2' # represents revlogv2
                        ' dummy changelog to prevent using the old repo layout'
                    )
                if self.ui.configbool('format', 'generaldelta', False):
                    requirements.append("generaldelta")
                requirements = set(requirements)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            # existing repository: read its requirements file
            try:
                requirements = scmutil.readrequires(self.vfs, self.supported)
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # no requires file: oldest repo format, no requirements
                requirements = set()

        # resolve the share source, if any; default to our own .hg
        self.sharedpath = self.path
        try:
            vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
                              realpath=True)
            s = vfs.base
            if not vfs.exists():
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            # no sharedpath file means this repo is not shared
            if inst.errno != errno.ENOENT:
                raise

        # the store lives in the (possibly shared) sharedpath
        self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
        self.spath = self.store.path
        self.svfs = self.store.vfs
        self.sopener = self.svfs
        self.sjoin = self.store.join
        self.vfs.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()


        self._branchcaches = {}
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}

        # hold sets of revision to be filtered
        # should be cleared when something might have changed the filter value:
        # - new changesets,
        # - phase change,
        # - new obsolescence marker,
        # - working directory parent change,
        # - bookmark changes
        self.filteredrevcache = {}

        # generic mapping between names and nodes
        self.names = namespaces.namespaces()
303
303
    def close(self):
        # nothing to release for a local repository; kept for peer API parity
        pass
306
306
    def _restrictcapabilities(self, caps):
        """Filter/extend the capability set advertised to peers."""
        # bundle2 is not ready for prime time, drop it unless explicitly
        # required by the tests (or some brave tester)
        if self.ui.configbool('experimental', 'bundle2-exp', False):
            caps = set(caps)  # copy: do not mutate the shared default set
            capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
            caps.add('bundle2-exp=' + urllib.quote(capsblob))
        return caps
315
315
    def _applyrequirements(self, requirements):
        """Record requirements and derive store-opener options from them."""
        self.requirements = requirements
        # only requirements listed in openerreqs become opener options
        self.sopener.options = dict((r, 1) for r in requirements
                                    if r in self.openerreqs)
        chunkcachesize = self.ui.configint('format', 'chunkcachesize')
        if chunkcachesize is not None:
            self.sopener.options['chunkcachesize'] = chunkcachesize
        maxchainlen = self.ui.configint('format', 'maxchainlen')
        if maxchainlen is not None:
            self.sopener.options['maxchainlen'] = maxchainlen
326
326
    def _writerequirements(self):
        """Write self.requirements to .hg/requires, one per line, sorted."""
        reqfile = self.opener("requires", "w")
        for r in sorted(self.requirements):
            reqfile.write("%s\n" % r)
        reqfile.close()
332
332
    def _checknested(self, path):
        """Determine if path is a legal nested repository."""
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            # walk prefixes from longest to shortest looking for a subrepo
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    # path is exactly a registered subrepo: legal
                    return True
                else:
                    # path is inside a subrepo: delegate the check to it
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                parts.pop()
        return False
370
370
def peer(self):
    """Return a localpeer wrapper presenting the peer API over this repo."""
    return localpeer(self) # not cached to avoid reference cycle
373
373
def unfiltered(self):
    """Return unfiltered version of the repository.

    The base implementation is the identity; repoview-based filtered
    repositories override this to expose the underlying repo."""
    return self
379
379
def filtered(self, name):
    """Return a filtered version of a repository

    *name* selects the repoview filter (e.g. 'visible'). A proxy class
    is built on the fly so the view keeps any repo subclass behavior.
    """
    # build a new class with the mixin and the current class
    # (possibly subclass of the repo)
    class proxycls(repoview.repoview, self.unfiltered().__class__):
        pass
    return proxycls(self, name)
387
387
@repofilecache('bookmarks')
def _bookmarks(self):
    # bookmark store, cached and invalidated when .hg/bookmarks changes
    return bookmarks.bmstore(self)
391
391
@repofilecache('bookmarks.current')
def _bookmarkcurrent(self):
    # name of the active bookmark (or None), tracked via bookmarks.current
    return bookmarks.readcurrent(self)
395
395
def bookmarkheads(self, bookmark):
    """Return the nodes of all bookmarks sharing *bookmark*'s base name.

    Divergent bookmarks share the portion before '@' (e.g. 'foo' and
    'foo@remote'); the target nodes of all of them are returned, in
    bookmark-store iteration order."""
    base = bookmark.split('@', 1)[0]
    return [node for mark, node in self._bookmarks.iteritems()
            if mark.split('@', 1)[0] == base]
403
403
@storecache('phaseroots')
def _phasecache(self):
    # phase information (public/draft/secret), reloaded when the
    # store file 'phaseroots' changes
    return phases.phasecache(self, self._phasedefaults)
407
407
@storecache('obsstore')
def obsstore(self):
    """Obsolescence-marker store for this repository (cached).

    Opened read-only unless marker creation is enabled; a warning is
    emitted when markers exist but the feature is disabled.
    """
    # read default format for new obsstore.
    defaultformat = self.ui.configint('format', 'obsstore-version', None)
    # rely on obsstore class default when possible.
    kwargs = {}
    if defaultformat is not None:
        kwargs['defaultformat'] = defaultformat
    readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
    store = obsolete.obsstore(self.sopener, readonly=readonly,
                              **kwargs)
    if store and readonly:
        # message is rare enough to not be translated
        msg = 'obsolete feature not enabled but %i markers found!\n'
        self.ui.warn(msg % len(list(store)))
    return store
424
424
@storecache('00changelog.i')
def changelog(self):
    """The changelog revlog (cached on '00changelog.i').

    When HG_PENDING points inside this repo (set while hooks run on an
    uncommitted transaction), pending changesets are made visible too.
    """
    c = changelog.changelog(self.sopener)
    if 'HG_PENDING' in os.environ:
        p = os.environ['HG_PENDING']
        if p.startswith(self.root):
            c.readpending('00changelog.i.a')
    return c
433
433
@storecache('00manifest.i')
def manifest(self):
    # the manifest revlog, cached on the store file '00manifest.i'
    return manifest.manifest(self.sopener)
437
437
@repofilecache('dirstate')
def dirstate(self):
    """The working-directory dirstate (cached on '.hg/dirstate').

    Parent nodes recorded in the dirstate are validated against the
    changelog; an unknown parent is replaced by nullid with a one-time
    warning (warned is a one-element list so the closure can mutate it).
    """
    warned = [0]
    def validate(node):
        try:
            self.changelog.rev(node)
            return node
        except error.LookupError:
            if not warned[0]:
                warned[0] = True
                self.ui.warn(_("warning: ignoring unknown"
                               " working parent %s!\n") % short(node))
            return nullid

    return dirstate.dirstate(self.opener, self.ui, self.root, validate)
453
453
def __getitem__(self, changeid):
    """repo[changeid] -> context.

    None yields the working context; a slice yields a list of
    changectxs over that revision range (filtered revs excluded);
    anything else is resolved to a single changectx.
    """
    if changeid is None:
        return context.workingctx(self)
    if isinstance(changeid, slice):
        return [context.changectx(self, i)
                for i in xrange(*changeid.indices(len(self)))
                if i not in self.changelog.filteredrevs]
    return context.changectx(self, changeid)
462
462
def __contains__(self, changeid):
    """True if *changeid* resolves to a changeset in this repository."""
    try:
        return bool(self.lookup(changeid))
    except error.RepoLookupError:
        return False
468
468
def __nonzero__(self):
    # a repository object is always truthy, even when empty
    return True
471
471
def __len__(self):
    # number of revisions equals the changelog length
    return len(self.changelog)
474
474
def __iter__(self):
    # iterate over revision numbers, delegating to the changelog
    return iter(self.changelog)
477
477
def revs(self, expr, *args):
    '''Return a list of revisions matching the given revset'''
    # substitute *args into the revset template, then evaluate it
    # over the full span of this repo's revisions
    expr = revset.formatspec(expr, *args)
    m = revset.match(None, expr)
    return m(self, revset.spanset(self))
483
483
def set(self, expr, *args):
    '''
    Yield a context for each matching revision, after doing arg
    replacement via revset.formatspec

    NOTE: the method name shadows the builtin set() inside this class,
    but it is established API and must keep its name.
    '''
    for r in self.revs(expr, *args):
        yield self[r]
491
491
def url(self):
    """Return this repository's URL: the 'file:' scheme plus its root."""
    return 'file:%s' % self.root
494
494
def hook(self, name, throw=False, **args):
    """Call a hook, passing this repo instance.

    This is a convenience method to aid invoking hooks. Extensions likely
    won't call this unless they have registered a custom hook or are
    replacing code that is expected to call a hook.
    """
    return hook.hook(self.ui, self, name, throw, **args)
503
503
504 @unfilteredmethod
504 @unfilteredmethod
505 def _tag(self, names, node, message, local, user, date, extra={},
505 def _tag(self, names, node, message, local, user, date, extra={},
506 editor=False):
506 editor=False):
507 if isinstance(names, str):
507 if isinstance(names, str):
508 names = (names,)
508 names = (names,)
509
509
510 branches = self.branchmap()
510 branches = self.branchmap()
511 for name in names:
511 for name in names:
512 self.hook('pretag', throw=True, node=hex(node), tag=name,
512 self.hook('pretag', throw=True, node=hex(node), tag=name,
513 local=local)
513 local=local)
514 if name in branches:
514 if name in branches:
515 self.ui.warn(_("warning: tag %s conflicts with existing"
515 self.ui.warn(_("warning: tag %s conflicts with existing"
516 " branch name\n") % name)
516 " branch name\n") % name)
517
517
518 def writetags(fp, names, munge, prevtags):
518 def writetags(fp, names, munge, prevtags):
519 fp.seek(0, 2)
519 fp.seek(0, 2)
520 if prevtags and prevtags[-1] != '\n':
520 if prevtags and prevtags[-1] != '\n':
521 fp.write('\n')
521 fp.write('\n')
522 for name in names:
522 for name in names:
523 m = munge and munge(name) or name
523 m = munge and munge(name) or name
524 if (self._tagscache.tagtypes and
524 if (self._tagscache.tagtypes and
525 name in self._tagscache.tagtypes):
525 name in self._tagscache.tagtypes):
526 old = self.tags().get(name, nullid)
526 old = self.tags().get(name, nullid)
527 fp.write('%s %s\n' % (hex(old), m))
527 fp.write('%s %s\n' % (hex(old), m))
528 fp.write('%s %s\n' % (hex(node), m))
528 fp.write('%s %s\n' % (hex(node), m))
529 fp.close()
529 fp.close()
530
530
531 prevtags = ''
531 prevtags = ''
532 if local:
532 if local:
533 try:
533 try:
534 fp = self.opener('localtags', 'r+')
534 fp = self.opener('localtags', 'r+')
535 except IOError:
535 except IOError:
536 fp = self.opener('localtags', 'a')
536 fp = self.opener('localtags', 'a')
537 else:
537 else:
538 prevtags = fp.read()
538 prevtags = fp.read()
539
539
540 # local tags are stored in the current charset
540 # local tags are stored in the current charset
541 writetags(fp, names, None, prevtags)
541 writetags(fp, names, None, prevtags)
542 for name in names:
542 for name in names:
543 self.hook('tag', node=hex(node), tag=name, local=local)
543 self.hook('tag', node=hex(node), tag=name, local=local)
544 return
544 return
545
545
546 try:
546 try:
547 fp = self.wfile('.hgtags', 'rb+')
547 fp = self.wfile('.hgtags', 'rb+')
548 except IOError, e:
548 except IOError, e:
549 if e.errno != errno.ENOENT:
549 if e.errno != errno.ENOENT:
550 raise
550 raise
551 fp = self.wfile('.hgtags', 'ab')
551 fp = self.wfile('.hgtags', 'ab')
552 else:
552 else:
553 prevtags = fp.read()
553 prevtags = fp.read()
554
554
555 # committed tags are stored in UTF-8
555 # committed tags are stored in UTF-8
556 writetags(fp, names, encoding.fromlocal, prevtags)
556 writetags(fp, names, encoding.fromlocal, prevtags)
557
557
558 fp.close()
558 fp.close()
559
559
560 self.invalidatecaches()
560 self.invalidatecaches()
561
561
562 if '.hgtags' not in self.dirstate:
562 if '.hgtags' not in self.dirstate:
563 self[None].add(['.hgtags'])
563 self[None].add(['.hgtags'])
564
564
565 m = matchmod.exact(self.root, '', ['.hgtags'])
565 m = matchmod.exact(self.root, '', ['.hgtags'])
566 tagnode = self.commit(message, user, date, extra=extra, match=m,
566 tagnode = self.commit(message, user, date, extra=extra, match=m,
567 editor=editor)
567 editor=editor)
568
568
569 for name in names:
569 for name in names:
570 self.hook('tag', node=hex(node), tag=name, local=local)
570 self.hook('tag', node=hex(node), tag=name, local=local)
571
571
572 return tagnode
572 return tagnode
573
573
def tag(self, names, node, message, local, user, date, editor=False):
    '''tag a revision with one or more symbolic names.

    names is a list of strings or, when adding a single tag, names may be a
    string.

    if local is True, the tags are stored in a per-repository file.
    otherwise, they are stored in the .hgtags file, and a new
    changeset is committed with the change.

    keyword arguments:

    local: whether to store tags in non-version-controlled file
    (default False)

    message: commit message to use if committing

    user: name of user to use if committing

    date: date tuple to use if committing'''

    if not local:
        # refuse to commit over a dirty .hgtags working copy
        m = matchmod.exact(self.root, '', ['.hgtags'])
        if util.any(self.status(match=m, unknown=True, ignored=True)):
            raise util.Abort(_('working copy of .hgtags is changed'),
                             hint=_('please commit .hgtags manually'))

    self.tags() # instantiate the cache
    self._tag(names, node, message, local, user, date, editor=editor)
603
603
@filteredpropertycache
def _tagscache(self):
    '''Returns a tagscache object that contains various tags related
    caches.'''

    # This simplifies its cache management by having one decorated
    # function (this one) and the rest simply fetch things from it.
    class tagscache(object):
        def __init__(self):
            # These two define the set of tags for this repository. tags
            # maps tag name to node; tagtypes maps tag name to 'global' or
            # 'local'. (Global tags are defined by .hgtags across all
            # heads, and local tags are defined in .hg/localtags.)
            # They constitute the in-memory cache of tags.
            self.tags = self.tagtypes = None

            # lazily-computed derivatives: node -> tag names, and the
            # revision-ordered (tag, node) list
            self.nodetagscache = self.tagslist = None

    cache = tagscache()
    cache.tags, cache.tagtypes = self._findtags()

    return cache
626
626
def tags(self):
    '''return a mapping of tag to node'''
    t = {}
    if self.changelog.filteredrevs:
        # filtered views cannot trust the shared cache: recompute so
        # tags pointing at filtered changesets are handled correctly
        tags, tt = self._findtags()
    else:
        tags = self._tagscache.tags
    for k, v in tags.iteritems():
        try:
            # ignore tags to unknown nodes
            self.changelog.rev(v)
            t[k] = v
        except (error.LookupError, ValueError):
            pass
    return t
642
642
def _findtags(self):
    '''Do the hard work of finding tags. Return a pair of dicts
    (tags, tagtypes) where tags maps tag name to node, and tagtypes
    maps tag name to a string like \'global\' or \'local\'.
    Subclasses or extensions are free to add their own tags, but
    should be aware that the returned dicts will be retained for the
    duration of the localrepo object.'''

    # XXX what tagtype should subclasses/extensions use? Currently
    # mq and bookmarks add tags, but do not set the tagtype at all.
    # Should each extension invent its own tag type? Should there
    # be one tagtype for all such "virtual" tags? Or is the status
    # quo fine?

    alltags = {} # map tag name to (node, hist)
    tagtypes = {}

    tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
    tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

    # Build the return dicts. Have to re-encode tag names because
    # the tags module always uses UTF-8 (in order not to lose info
    # writing to the cache), but the rest of Mercurial wants them in
    # local encoding.
    tags = {}
    for (name, (node, hist)) in alltags.iteritems():
        if node != nullid:
            # tags pointing at the null node are deletions; drop them
            tags[encoding.tolocal(name)] = node
    tags['tip'] = self.changelog.tip()
    tagtypes = dict([(encoding.tolocal(name), value)
                     for (name, value) in tagtypes.iteritems()])
    return (tags, tagtypes)
675
675
def tagtype(self, tagname):
    """Return the type of *tagname*.

    Possible results:

    'local'  : a local tag (from .hg/localtags)
    'global' : a global tag (from .hgtags)
    None     : tag does not exist
    """
    return self._tagscache.tagtypes.get(tagname)
686
686
def tagslist(self):
    '''return a list of tags ordered by revision'''
    if not self._tagscache.tagslist:
        # decorate with the revision number, sort, then strip it again
        decorated = sorted((self.changelog.rev(n), t, n)
                           for t, n in self.tags().iteritems())
        self._tagscache.tagslist = [(t, n) for r, t, n in decorated]

    return self._tagscache.tagslist
696
696
def nodetags(self, node):
    '''return the tags associated with a node'''
    if not self._tagscache.nodetagscache:
        # build the reverse mapping node -> sorted tag names once,
        # then cache it on the tagscache object
        nodetagscache = {}
        for t, n in self._tagscache.tags.iteritems():
            nodetagscache.setdefault(n, []).append(t)
        for tags in nodetagscache.itervalues():
            tags.sort()
        self._tagscache.nodetagscache = nodetagscache
    return self._tagscache.nodetagscache.get(node, [])
707
707
def nodebookmarks(self, node):
    """Return the sorted list of bookmark names pointing at *node*."""
    return sorted(mark for mark, target in self._bookmarks.iteritems()
                  if target == node)
714
714
def branchmap(self):
    '''returns a dictionary {branch: [branchheads]} with branchheads
    ordered by increasing revision number'''
    # refresh the per-filter cache, then return the view for the
    # current filter level
    branchmap.updatecache(self)
    return self._branchcaches[self.filtername]
720
720
def branchtip(self, branch):
    '''return the tip node for a given branch

    Raises RepoLookupError if the branch does not exist.'''
    try:
        return self.branchmap().branchtip(branch)
    except KeyError:
        raise error.RepoLookupError(_("unknown branch '%s'") % branch)
727
727
def lookup(self, key):
    """Resolve *key* to a changeset and return its node."""
    ctx = self[key]
    return ctx.node()
730
730
def lookupbranch(self, key, remote=None):
    """Return the branch name *key* refers to.

    If *key* is itself a branch name (in *remote*'s branchmap when
    given, otherwise ours), return it unchanged; otherwise resolve it
    as a changeset and return that changeset's branch.
    """
    repo = remote or self
    if key in repo.branchmap():
        return key

    # prefer the remote's local() form when available, else fall
    # back to ourselves for changeset lookup
    repo = (remote and remote.local()) and remote or self
    return repo[key].branch()
738
738
def known(self, nodes):
    """For each node, report whether it is known here and not secret.

    Returns a list of booleans parallel to *nodes*; a node counts as
    known only when it is in the changelog and its phase is below
    'secret'."""
    nodemap = self.changelog.nodemap
    phasecache = self._phasecache
    result = []
    for node in nodes:
        rev = nodemap.get(node)
        visible = (rev is not None
                   and phasecache.phase(self, rev) < phases.secret)
        result.append(visible)
    return result
748
748
def local(self):
    """Return self: local repositories answer truthy here, while
    remote peers return None/False."""
    return self
751
751
def cancopy(self):
    """Whether this repository may be cloned by copying files directly."""
    # so statichttprepo's override of local() works
    if not self.local():
        return False
    if not self.ui.configbool('phases', 'publish', True):
        return True
    # if publishing we can't copy if there is filtered content
    return not self.filtered('visible').changelog.filteredrevs
760
760
def shared(self):
    '''the type of shared repository (None if not shared)'''
    # a shared repo points its store elsewhere, so sharedpath differs
    # from the local .hg path
    return 'store' if self.sharedpath != self.path else None
766
def join(self, f, *insidef):
    """Return a path under the repository's .hg directory."""
    components = (self.path, f) + insidef
    return os.path.join(*components)
763
769
def wjoin(self, f, *insidef):
    """Return a path under the working-directory root."""
    components = (self.root, f) + insidef
    return os.path.join(*components)
766
772
def file(self, f):
    """Return the filelog for tracked path *f* (leading '/' stripped).

    NOTE: the method name shadows the py2 builtin file(), but it is
    established API.
    """
    path = f[1:] if f[0] == '/' else f
    return filelog.filelog(self.sopener, path)
771
777
def changectx(self, changeid):
    """Thin alias for repo[changeid]."""
    return self.__getitem__(changeid)
774
780
def parents(self, changeid=None):
    '''get list of changectxs for parents of changeid

    Defaults to the working directory's parents when changeid is None.'''
    return self[changeid].parents()
778
784
def setparents(self, p1, p2=nullid):
    """Set the working directory's parent changesets to p1 (and p2).

    Wraps the dirstate parent change and patches up copy records the
    dirstate cannot fix itself.
    """
    self.dirstate.beginparentchange()
    copies = self.dirstate.setparents(p1, p2)
    pctx = self[p1]
    if copies:
        # Adjust copy records, the dirstate cannot do it, it
        # requires access to parents manifests. Preserve them
        # only for entries added to first parent.
        for f in copies:
            if f not in pctx and copies[f] in pctx:
                self.dirstate.copy(copies[f], f)
        if p2 == nullid:
            # collapsing to a single parent: drop copy records whose
            # source and destination are both absent from p1
            for f, s in sorted(self.dirstate.copies().items()):
                if f not in pctx and s not in pctx:
                    self.dirstate.copy(None, f)
    self.dirstate.endparentchange()
795
801
def filectx(self, path, changeid=None, fileid=None):
    """changeid can be a changeset revision, node, or tag.
    fileid can be a file revision or node."""
    return context.filectx(self, path, changeid, fileid)
800
806
def getcwd(self):
    # current working directory as understood by the dirstate
    return self.dirstate.getcwd()
803
809
def pathto(self, f, cwd=None):
    # repo-relative path f rendered relative to cwd (delegated)
    return self.dirstate.pathto(f, cwd)
806
812
def wfile(self, f, mode='r'):
    # open file f from the working directory with the given mode
    return self.wopener(f, mode)
809
815
def _link(self, f):
    # True if working-directory file f is a symlink
    return self.wvfs.islink(f)
812
818
def _loadfilter(self, filter):
    """Load and cache the (matcher, function, params) list for the
    named config section *filter* (e.g. 'encode' or 'decode').

    NOTE: the parameter name shadows the py2 builtin filter(); kept
    for interface compatibility.
    """
    if filter not in self.filterpats:
        l = []
        for pat, cmd in self.ui.configitems(filter):
            if cmd == '!':
                # '!' disables filtering for this pattern
                continue
            mf = matchmod.match(self.root, '', [pat])
            fn = None
            params = cmd
            # prefer a registered in-process data filter whose name
            # prefixes the command
            for name, filterfn in self._datafilters.iteritems():
                if cmd.startswith(name):
                    fn = filterfn
                    params = cmd[len(name):].lstrip()
                    break
            if not fn:
                # fall back to running cmd as an external shell filter
                fn = lambda s, c, **kwargs: util.filter(s, c)
            # Wrap old filters not supporting keyword arguments
            if not inspect.getargspec(fn)[2]:
                oldfn = fn
                fn = lambda s, c, **kwargs: oldfn(s, c)
            l.append((mf, fn, params))
        self.filterpats[filter] = l
    return self.filterpats[filter]
836
842
def _filter(self, filterpats, filename, data):
    """Run *data* through the first filter in *filterpats* whose
    pattern matches *filename*; return the (possibly transformed) data."""
    for mf, fn, cmd in filterpats:
        if mf(filename):
            self.ui.debug("filtering %s through %s\n" % (filename, cmd))
            data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
            # only the first matching filter applies
            break

    return data
845
851
@unfilteredpropertycache
def _encodefilterpats(self):
    # lazily-loaded [encode] filter patterns
    return self._loadfilter('encode')
849
855
@unfilteredpropertycache
def _decodefilterpats(self):
    # lazily-loaded [decode] filter patterns
    return self._loadfilter('decode')
853
859
def adddatafilter(self, name, filter):
    """Register an in-process data filter under *name* for use by
    encode/decode filter specifications."""
    self._datafilters[name] = filter
856
862
def wread(self, filename):
    """Read *filename* from the working directory, applying encode
    filters. Symlinks yield their target string rather than contents."""
    if self._link(filename):
        data = self.wvfs.readlink(filename)
    else:
        data = self.wopener.read(filename)
    return self._filter(self._encodefilterpats, filename, data)
863
869
def wwrite(self, filename, data, flags):
    """Write *data* to *filename* in the working directory, applying
    decode filters and honoring flags ('l' symlink, 'x' executable)."""
    data = self._filter(self._decodefilterpats, filename, data)
    if 'l' in flags:
        # symlink: data is the link target
        self.wopener.symlink(data, filename)
    else:
        self.wopener.write(filename, data)
        if 'x' in flags:
            self.wvfs.setflags(filename, False, True)
872
878
def wwritedata(self, filename, data):
    """Return *data* as it would be written to the working directory
    (decode filters applied) without writing anything."""
    return self._filter(self._decodefilterpats, filename, data)
875
881
def currenttransaction(self):
    """Return the currently running transaction, or None if none exists."""
    # _transref is a weakref (or None); dereference it defensively
    tr = self._transref and self._transref() or None
    if tr and tr.running():
        return tr
    return None
882
888
def transaction(self, desc, report=None):
    """Open a transaction described by *desc* and return it.

    If a transaction is already running, return a nested handle to it.
    *report* overrides the default warning callback (self.ui.warn).
    Raises error.RepoError when an abandoned journal is found.
    """
    tr = self.currenttransaction()
    if tr is not None:
        return tr.nest()

    # abort here if the journal already exists
    if self.svfs.exists("journal"):
        raise error.RepoError(
            _("abandoned transaction found"),
            hint=_("run 'hg recover' to clean up transaction"))

    self._writejournal(desc)
    renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
    rp = report and report or self.ui.warn
    vfsmap = {'plain': self.opener} # root of .hg/
    tr = transaction.transaction(rp, self.sopener, vfsmap,
                                 "journal",
                                 aftertrans(renames),
                                 self.store.createmode)
    # note: writing the fncache only during finalize mean that the file is
    # outdated when running hooks. As fncache is used for streaming clone,
    # this is not expected to break anything that happen during the hooks.
    tr.addfinalize('flush-fncache', self.store.write)
    # keep only a weak reference so the transaction can be collected
    self._transref = weakref.ref(tr)
    return tr
908
914
def _journalfiles(self):
    """Return (vfs, name) pairs for every file written by _writejournal.

    Store-level files (journal, phaseroots) live on svfs; working-copy
    metadata (dirstate, branch, desc, bookmarks) on vfs.
    """
    return ((self.svfs, 'journal'),
            (self.vfs, 'journal.dirstate'),
            (self.vfs, 'journal.branch'),
            (self.vfs, 'journal.desc'),
            (self.vfs, 'journal.bookmarks'),
            (self.svfs, 'journal.phaseroots'))
916
922
def undofiles(self):
    """Return (vfs, undo-name) pairs for the journal backup files."""
    return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
919
925
def _writejournal(self, desc):
    """Snapshot dirstate, branch, desc, bookmarks and phaseroots into the
    journal.* files so an interrupted transaction can be rolled back."""
    self.opener.write("journal.dirstate",
                      self.opener.tryread("dirstate"))
    self.opener.write("journal.branch",
                      encoding.fromlocal(self.dirstate.branch()))
    # first line: current changelog length; second line: description
    self.opener.write("journal.desc",
                      "%d\n%s\n" % (len(self), desc))
    self.opener.write("journal.bookmarks",
                      self.opener.tryread("bookmarks"))
    self.sopener.write("journal.phaseroots",
                       self.sopener.tryread("phaseroots"))
931
937
def recover(self):
    """Roll back an interrupted transaction, if one exists.

    Returns True when a journal was found and rolled back, False
    otherwise.
    """
    lock = self.lock()
    try:
        if self.svfs.exists("journal"):
            self.ui.status(_("rolling back interrupted transaction\n"))
            vfsmap = {'': self.sopener,
                      'plain': self.opener,}
            transaction.rollback(self.sopener, vfsmap, "journal",
                                 self.ui.warn)
            # caches may reference rolled-back data; drop them
            self.invalidate()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
    finally:
        lock.release()
948
954
def rollback(self, dryrun=False, force=False):
    """Undo the last transaction if undo information is available.

    Returns _rollback's status (0 on success) or 1 when there is
    nothing to roll back.
    """
    wlock = lock = None
    try:
        wlock = self.wlock()
        lock = self.lock()
        if self.svfs.exists("undo"):
            return self._rollback(dryrun, force)
        else:
            self.ui.warn(_("no rollback information available\n"))
            return 1
    finally:
        release(lock, wlock)
961
967
@unfilteredmethod # Until we get smarter cache management
def _rollback(self, dryrun, force):
    """Implementation of rollback(): restore the undo.* snapshots.

    Without *force*, refuses to roll back a commit while the working
    directory is not on tip (that could lose data). Returns 0.
    """
    ui = self.ui
    try:
        # undo.desc: "<old changelog length>\n<desc>[\n<detail>]\n"
        args = self.opener.read('undo.desc').splitlines()
        (oldlen, desc, detail) = (int(args[0]), args[1], None)
        if len(args) >= 3:
            detail = args[2]
        oldtip = oldlen - 1

        if detail and ui.verbose:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s: %s)\n')
                   % (oldtip, desc, detail))
        else:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s)\n')
                   % (oldtip, desc))
    except IOError:
        msg = _('rolling back unknown transaction\n')
        desc = None

    if not force and self['.'] != self['tip'] and desc == 'commit':
        raise util.Abort(
            _('rollback of last commit while not checked out '
              'may lose data'), hint=_('use -f to force'))

    ui.status(msg)
    if dryrun:
        return 0

    parents = self.dirstate.parents()
    self.destroying()
    vfsmap = {'plain': self.opener}
    transaction.rollback(self.sopener, vfsmap, 'undo', ui.warn)
    if self.vfs.exists('undo.bookmarks'):
        self.vfs.rename('undo.bookmarks', 'bookmarks')
    if self.svfs.exists('undo.phaseroots'):
        self.svfs.rename('undo.phaseroots', 'phaseroots')
    self.invalidate()

    # only restore dirstate/branch if a working-directory parent was
    # stripped by the rollback
    parentgone = (parents[0] not in self.changelog.nodemap or
                  parents[1] not in self.changelog.nodemap)
    if parentgone:
        self.vfs.rename('undo.dirstate', 'dirstate')
        try:
            branch = self.opener.read('undo.branch')
            self.dirstate.setbranch(encoding.tolocal(branch))
        except IOError:
            ui.warn(_('named branch could not be reset: '
                      'current branch is still \'%s\'\n')
                    % self.dirstate.branch())

        self.dirstate.invalidate()
        parents = tuple([p.rev() for p in self.parents()])
        if len(parents) > 1:
            ui.status(_('working directory now based on '
                        'revisions %d and %d\n') % parents)
        else:
            ui.status(_('working directory now based on '
                        'revision %d\n') % parents)
    # TODO: if we know which new heads may result from this rollback, pass
    # them to destroy(), which will prevent the branchhead cache from being
    # invalidated.
    self.destroyed()
    return 0
1028
1034
def invalidatecaches(self):
    """Drop in-memory derived caches (tags, branch caches, volatile sets)."""

    if '_tagscache' in vars(self):
        # can't use delattr on proxy
        del self.__dict__['_tagscache']

    self.unfiltered()._branchcaches.clear()
    self.invalidatevolatilesets()
1037
1043
def invalidatevolatilesets(self):
    """Drop caches derived from filtered revisions and obsolescence data."""
    self.filteredrevcache.clear()
    obsolete.clearobscaches(self)
1041
1047
def invalidatedirstate(self):
    '''Invalidates the dirstate, causing the next call to dirstate
    to check if it was modified since the last time it was read,
    rereading it if it has.

    This is different to dirstate.invalidate() in that it doesn't always
    reread the dirstate. Use dirstate.invalidate() if you want to
    explicitly read the dirstate again (i.e. restoring it to a previous
    known good state).'''
    if hasunfilteredcache(self, 'dirstate'):
        # drop the dirstate's own file-cache entries first
        for k in self.dirstate._filecache:
            try:
                delattr(self.dirstate, k)
            except AttributeError:
                pass
        delattr(self.unfiltered(), 'dirstate')
1058
1064
def invalidate(self):
    """Drop every cached file-backed attribute (except the dirstate) and
    invalidate the repo-level and store-level caches."""
    unfiltered = self.unfiltered() # all file caches are stored unfiltered
    for k in self._filecache:
        # dirstate is invalidated separately in invalidatedirstate()
        if k == 'dirstate':
            continue

        try:
            delattr(unfiltered, k)
        except AttributeError:
            pass
    self.invalidatecaches()
    self.store.invalidatecaches()
1072
1078
def invalidateall(self):
    '''Fully invalidates both store and non-store parts, causing the
    subsequent operation to reread any outside changes.'''
    # extension should hook this to invalidate its caches
    self.invalidate()
    self.invalidatedirstate()
1079
1085
1080 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1086 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1081 try:
1087 try:
1082 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1088 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1083 except error.LockHeld, inst:
1089 except error.LockHeld, inst:
1084 if not wait:
1090 if not wait:
1085 raise
1091 raise
1086 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1092 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1087 (desc, inst.locker))
1093 (desc, inst.locker))
1088 # default to 600 seconds timeout
1094 # default to 600 seconds timeout
1089 l = lockmod.lock(vfs, lockname,
1095 l = lockmod.lock(vfs, lockname,
1090 int(self.ui.config("ui", "timeout", "600")),
1096 int(self.ui.config("ui", "timeout", "600")),
1091 releasefn, desc=desc)
1097 releasefn, desc=desc)
1092 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1098 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1093 if acquirefn:
1099 if acquirefn:
1094 acquirefn()
1100 acquirefn()
1095 return l
1101 return l
1096
1102
def _afterlock(self, callback):
    """add a callback to the current repository lock.

    The callback will be executed on lock release."""
    l = self._lockref and self._lockref()
    if l:
        l.postrelease.append(callback)
    else:
        # no lock held: run the callback immediately
        callback()
1106
1112
def lock(self, wait=True):
    '''Lock the repository store (.hg/store) and return a weak reference
    to the lock. Use this before modifying the store (e.g. committing or
    stripping). If you are opening a transaction, get a lock as well.)'''
    # reuse the existing lock if it is still held
    l = self._lockref and self._lockref()
    if l is not None and l.held:
        l.lock()
        return l

    def unlock():
        # refresh cached stat info for file-backed attributes loaded
        # while the lock was held (dirstate handled by wlock)
        for k, ce in self._filecache.items():
            if k == 'dirstate' or k not in self.__dict__:
                continue
            ce.refresh()

    l = self._lock(self.svfs, "lock", wait, unlock,
                   self.invalidate, _('repository %s') % self.origroot)
    self._lockref = weakref.ref(l)
    return l
1126
1132
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.
    Use this before modifying files in .hg.'''
    # reuse the existing working-directory lock if it is still held
    l = self._wlockref and self._wlockref()
    if l is not None and l.held:
        l.lock()
        return l

    def unlock():
        if self.dirstate.pendingparentchange():
            # a parent change was started but not finished: discard it
            self.dirstate.invalidate()
        else:
            self.dirstate.write()

        self._filecache['dirstate'].refresh()

    l = self._lock(self.vfs, "wlock", wait, unlock,
                   self.invalidatedirstate, _('working directory of %s') %
                   self.origroot)
    self._wlockref = weakref.ref(l)
    return l
1149
1155
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """
    commit an individual file as part of a larger transaction

    Appends fname to *changelist* when the file content (or merge
    flags) changed, and returns the filelog node to record in the
    manifest (a new node, or the unchanged first parent).
    """

    fname = fctx.path()
    text = fctx.data()
    flog = self.file(fname)
    fparent1 = manifest1.get(fname, nullid)
    fparent2 = manifest2.get(fname, nullid)

    meta = {}
    copy = fctx.renamed()
    if copy and copy[0] != fname:
        # Mark the new revision of this file as a copy of another
        # file.  This copy data will effectively act as a parent
        # of this new revision.  If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent.  For example:
        #
        # 0 --- 1 --- 3   rev1 changes file foo
        #   \       /     rev2 renames foo to bar and changes it
        #    \- 2 -/      rev3 should have bar with all changes and
        #                      should record that bar descends from
        #                      bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3   rev4 reverts the content change from rev2
        #   \       /     merging rev3 and rev4 should use bar@rev2
        #    \- 2 --- 4        as the merge base
        #

        cfname = copy[0]
        crev = manifest1.get(cfname)
        newfparent = fparent2

        if manifest2: # branch merge
            if fparent2 == nullid or crev is None: # copied on remote side
                if cfname in manifest2:
                    crev = manifest2[cfname]
                    newfparent = fparent1

        # find source in nearest ancestor if we've lost track
        if not crev:
            self.ui.debug(" %s: searching for copy revision for %s\n" %
                          (fname, cfname))
            for ancestor in self[None].ancestors():
                if cfname in ancestor:
                    crev = ancestor[cfname].filenode()
                    break

        if crev:
            self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        else:
            self.ui.warn(_("warning: can't find ancestor for '%s' "
                           "copied from '%s'!\n") % (fname, cfname))

    elif fparent1 == nullid:
        fparent1, fparent2 = fparent2, nullid
    elif fparent2 != nullid:
        # is one parent an ancestor of the other?
        fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
        if fparent1 in fparentancestors:
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 in fparentancestors:
            fparent2 = nullid

    # is the file changed?
    if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
        changelist.append(fname)
        return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
    # are just the flags changed during merge?
    elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
        changelist.append(fname)

    return fparent1
1230
1236
1231 @unfilteredmethod
1237 @unfilteredmethod
1232 def commit(self, text="", user=None, date=None, match=None, force=False,
1238 def commit(self, text="", user=None, date=None, match=None, force=False,
1233 editor=False, extra={}):
1239 editor=False, extra={}):
1234 """Add a new revision to current repository.
1240 """Add a new revision to current repository.
1235
1241
1236 Revision information is gathered from the working directory,
1242 Revision information is gathered from the working directory,
1237 match can be used to filter the committed files. If editor is
1243 match can be used to filter the committed files. If editor is
1238 supplied, it is called to get a commit message.
1244 supplied, it is called to get a commit message.
1239 """
1245 """
1240
1246
1241 def fail(f, msg):
1247 def fail(f, msg):
1242 raise util.Abort('%s: %s' % (f, msg))
1248 raise util.Abort('%s: %s' % (f, msg))
1243
1249
1244 if not match:
1250 if not match:
1245 match = matchmod.always(self.root, '')
1251 match = matchmod.always(self.root, '')
1246
1252
1247 if not force:
1253 if not force:
1248 vdirs = []
1254 vdirs = []
1249 match.explicitdir = vdirs.append
1255 match.explicitdir = vdirs.append
1250 match.bad = fail
1256 match.bad = fail
1251
1257
1252 wlock = self.wlock()
1258 wlock = self.wlock()
1253 try:
1259 try:
1254 wctx = self[None]
1260 wctx = self[None]
1255 merge = len(wctx.parents()) > 1
1261 merge = len(wctx.parents()) > 1
1256
1262
1257 if (not force and merge and match and
1263 if (not force and merge and match and
1258 (match.files() or match.anypats())):
1264 (match.files() or match.anypats())):
1259 raise util.Abort(_('cannot partially commit a merge '
1265 raise util.Abort(_('cannot partially commit a merge '
1260 '(do not specify files or patterns)'))
1266 '(do not specify files or patterns)'))
1261
1267
1262 status = self.status(match=match, clean=force)
1268 status = self.status(match=match, clean=force)
1263 if force:
1269 if force:
1264 status.modified.extend(status.clean) # mq may commit clean files
1270 status.modified.extend(status.clean) # mq may commit clean files
1265
1271
1266 # check subrepos
1272 # check subrepos
1267 subs = []
1273 subs = []
1268 commitsubs = set()
1274 commitsubs = set()
1269 newstate = wctx.substate.copy()
1275 newstate = wctx.substate.copy()
1270 # only manage subrepos and .hgsubstate if .hgsub is present
1276 # only manage subrepos and .hgsubstate if .hgsub is present
1271 if '.hgsub' in wctx:
1277 if '.hgsub' in wctx:
1272 # we'll decide whether to track this ourselves, thanks
1278 # we'll decide whether to track this ourselves, thanks
1273 for c in status.modified, status.added, status.removed:
1279 for c in status.modified, status.added, status.removed:
1274 if '.hgsubstate' in c:
1280 if '.hgsubstate' in c:
1275 c.remove('.hgsubstate')
1281 c.remove('.hgsubstate')
1276
1282
1277 # compare current state to last committed state
1283 # compare current state to last committed state
1278 # build new substate based on last committed state
1284 # build new substate based on last committed state
1279 oldstate = wctx.p1().substate
1285 oldstate = wctx.p1().substate
1280 for s in sorted(newstate.keys()):
1286 for s in sorted(newstate.keys()):
1281 if not match(s):
1287 if not match(s):
1282 # ignore working copy, use old state if present
1288 # ignore working copy, use old state if present
1283 if s in oldstate:
1289 if s in oldstate:
1284 newstate[s] = oldstate[s]
1290 newstate[s] = oldstate[s]
1285 continue
1291 continue
1286 if not force:
1292 if not force:
1287 raise util.Abort(
1293 raise util.Abort(
1288 _("commit with new subrepo %s excluded") % s)
1294 _("commit with new subrepo %s excluded") % s)
1289 if wctx.sub(s).dirty(True):
1295 if wctx.sub(s).dirty(True):
1290 if not self.ui.configbool('ui', 'commitsubrepos'):
1296 if not self.ui.configbool('ui', 'commitsubrepos'):
1291 raise util.Abort(
1297 raise util.Abort(
1292 _("uncommitted changes in subrepo %s") % s,
1298 _("uncommitted changes in subrepo %s") % s,
1293 hint=_("use --subrepos for recursive commit"))
1299 hint=_("use --subrepos for recursive commit"))
1294 subs.append(s)
1300 subs.append(s)
1295 commitsubs.add(s)
1301 commitsubs.add(s)
1296 else:
1302 else:
1297 bs = wctx.sub(s).basestate()
1303 bs = wctx.sub(s).basestate()
1298 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1304 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1299 if oldstate.get(s, (None, None, None))[1] != bs:
1305 if oldstate.get(s, (None, None, None))[1] != bs:
1300 subs.append(s)
1306 subs.append(s)
1301
1307
1302 # check for removed subrepos
1308 # check for removed subrepos
1303 for p in wctx.parents():
1309 for p in wctx.parents():
1304 r = [s for s in p.substate if s not in newstate]
1310 r = [s for s in p.substate if s not in newstate]
1305 subs += [s for s in r if match(s)]
1311 subs += [s for s in r if match(s)]
1306 if subs:
1312 if subs:
1307 if (not match('.hgsub') and
1313 if (not match('.hgsub') and
1308 '.hgsub' in (wctx.modified() + wctx.added())):
1314 '.hgsub' in (wctx.modified() + wctx.added())):
1309 raise util.Abort(
1315 raise util.Abort(
1310 _("can't commit subrepos without .hgsub"))
1316 _("can't commit subrepos without .hgsub"))
1311 status.modified.insert(0, '.hgsubstate')
1317 status.modified.insert(0, '.hgsubstate')
1312
1318
1313 elif '.hgsub' in status.removed:
1319 elif '.hgsub' in status.removed:
1314 # clean up .hgsubstate when .hgsub is removed
1320 # clean up .hgsubstate when .hgsub is removed
1315 if ('.hgsubstate' in wctx and
1321 if ('.hgsubstate' in wctx and
1316 '.hgsubstate' not in (status.modified + status.added +
1322 '.hgsubstate' not in (status.modified + status.added +
1317 status.removed)):
1323 status.removed)):
1318 status.removed.insert(0, '.hgsubstate')
1324 status.removed.insert(0, '.hgsubstate')
1319
1325
1320 # make sure all explicit patterns are matched
1326 # make sure all explicit patterns are matched
1321 if not force and match.files():
1327 if not force and match.files():
1322 matched = set(status.modified + status.added + status.removed)
1328 matched = set(status.modified + status.added + status.removed)
1323
1329
1324 for f in match.files():
1330 for f in match.files():
1325 f = self.dirstate.normalize(f)
1331 f = self.dirstate.normalize(f)
1326 if f == '.' or f in matched or f in wctx.substate:
1332 if f == '.' or f in matched or f in wctx.substate:
1327 continue
1333 continue
1328 if f in status.deleted:
1334 if f in status.deleted:
1329 fail(f, _('file not found!'))
1335 fail(f, _('file not found!'))
1330 if f in vdirs: # visited directory
1336 if f in vdirs: # visited directory
1331 d = f + '/'
1337 d = f + '/'
1332 for mf in matched:
1338 for mf in matched:
1333 if mf.startswith(d):
1339 if mf.startswith(d):
1334 break
1340 break
1335 else:
1341 else:
1336 fail(f, _("no match under directory!"))
1342 fail(f, _("no match under directory!"))
1337 elif f not in self.dirstate:
1343 elif f not in self.dirstate:
1338 fail(f, _("file not tracked!"))
1344 fail(f, _("file not tracked!"))
1339
1345
1340 cctx = context.workingctx(self, text, user, date, extra, status)
1346 cctx = context.workingctx(self, text, user, date, extra, status)
1341
1347
1342 if (not force and not extra.get("close") and not merge
1348 if (not force and not extra.get("close") and not merge
1343 and not cctx.files()
1349 and not cctx.files()
1344 and wctx.branch() == wctx.p1().branch()):
1350 and wctx.branch() == wctx.p1().branch()):
1345 return None
1351 return None
1346
1352
1347 if merge and cctx.deleted():
1353 if merge and cctx.deleted():
1348 raise util.Abort(_("cannot commit merge with missing files"))
1354 raise util.Abort(_("cannot commit merge with missing files"))
1349
1355
1350 ms = mergemod.mergestate(self)
1356 ms = mergemod.mergestate(self)
1351 for f in status.modified:
1357 for f in status.modified:
1352 if f in ms and ms[f] == 'u':
1358 if f in ms and ms[f] == 'u':
1353 raise util.Abort(_("unresolved merge conflicts "
1359 raise util.Abort(_("unresolved merge conflicts "
1354 "(see hg help resolve)"))
1360 "(see hg help resolve)"))
1355
1361
1356 if editor:
1362 if editor:
1357 cctx._text = editor(self, cctx, subs)
1363 cctx._text = editor(self, cctx, subs)
1358 edited = (text != cctx._text)
1364 edited = (text != cctx._text)
1359
1365
1360 # Save commit message in case this transaction gets rolled back
1366 # Save commit message in case this transaction gets rolled back
1361 # (e.g. by a pretxncommit hook). Leave the content alone on
1367 # (e.g. by a pretxncommit hook). Leave the content alone on
1362 # the assumption that the user will use the same editor again.
1368 # the assumption that the user will use the same editor again.
1363 msgfn = self.savecommitmessage(cctx._text)
1369 msgfn = self.savecommitmessage(cctx._text)
1364
1370
1365 # commit subs and write new state
1371 # commit subs and write new state
1366 if subs:
1372 if subs:
1367 for s in sorted(commitsubs):
1373 for s in sorted(commitsubs):
1368 sub = wctx.sub(s)
1374 sub = wctx.sub(s)
1369 self.ui.status(_('committing subrepository %s\n') %
1375 self.ui.status(_('committing subrepository %s\n') %
1370 subrepo.subrelpath(sub))
1376 subrepo.subrelpath(sub))
1371 sr = sub.commit(cctx._text, user, date)
1377 sr = sub.commit(cctx._text, user, date)
1372 newstate[s] = (newstate[s][0], sr)
1378 newstate[s] = (newstate[s][0], sr)
1373 subrepo.writestate(self, newstate)
1379 subrepo.writestate(self, newstate)
1374
1380
1375 p1, p2 = self.dirstate.parents()
1381 p1, p2 = self.dirstate.parents()
1376 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1382 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1377 try:
1383 try:
1378 self.hook("precommit", throw=True, parent1=hookp1,
1384 self.hook("precommit", throw=True, parent1=hookp1,
1379 parent2=hookp2)
1385 parent2=hookp2)
1380 ret = self.commitctx(cctx, True)
1386 ret = self.commitctx(cctx, True)
1381 except: # re-raises
1387 except: # re-raises
1382 if edited:
1388 if edited:
1383 self.ui.write(
1389 self.ui.write(
1384 _('note: commit message saved in %s\n') % msgfn)
1390 _('note: commit message saved in %s\n') % msgfn)
1385 raise
1391 raise
1386
1392
1387 # update bookmarks, dirstate and mergestate
1393 # update bookmarks, dirstate and mergestate
1388 bookmarks.update(self, [p1, p2], ret)
1394 bookmarks.update(self, [p1, p2], ret)
1389 cctx.markcommitted(ret)
1395 cctx.markcommitted(ret)
1390 ms.reset()
1396 ms.reset()
1391 finally:
1397 finally:
1392 wlock.release()
1398 wlock.release()
1393
1399
1394 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1400 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1395 # hack for command that use a temporary commit (eg: histedit)
1401 # hack for command that use a temporary commit (eg: histedit)
1396 # temporary commit got stripped before hook release
1402 # temporary commit got stripped before hook release
1397 if node in self:
1403 if node in self:
1398 self.hook("commit", node=node, parent1=parent1,
1404 self.hook("commit", node=node, parent1=parent1,
1399 parent2=parent2)
1405 parent2=parent2)
1400 self._afterlock(commithook)
1406 self._afterlock(commithook)
1401 return ret
1407 return ret
1402
1408
1403 @unfilteredmethod
1409 @unfilteredmethod
1404 def commitctx(self, ctx, error=False):
1410 def commitctx(self, ctx, error=False):
1405 """Add a new revision to current repository.
1411 """Add a new revision to current repository.
1406 Revision information is passed via the context argument.
1412 Revision information is passed via the context argument.
1407 """
1413 """
1408
1414
1409 tr = None
1415 tr = None
1410 p1, p2 = ctx.p1(), ctx.p2()
1416 p1, p2 = ctx.p1(), ctx.p2()
1411 user = ctx.user()
1417 user = ctx.user()
1412
1418
1413 lock = self.lock()
1419 lock = self.lock()
1414 try:
1420 try:
1415 tr = self.transaction("commit")
1421 tr = self.transaction("commit")
1416 trp = weakref.proxy(tr)
1422 trp = weakref.proxy(tr)
1417
1423
1418 if ctx.files():
1424 if ctx.files():
1419 m1 = p1.manifest()
1425 m1 = p1.manifest()
1420 m2 = p2.manifest()
1426 m2 = p2.manifest()
1421 m = m1.copy()
1427 m = m1.copy()
1422
1428
1423 # check in files
1429 # check in files
1424 added = []
1430 added = []
1425 changed = []
1431 changed = []
1426 removed = list(ctx.removed())
1432 removed = list(ctx.removed())
1427 linkrev = len(self)
1433 linkrev = len(self)
1428 for f in sorted(ctx.modified() + ctx.added()):
1434 for f in sorted(ctx.modified() + ctx.added()):
1429 self.ui.note(f + "\n")
1435 self.ui.note(f + "\n")
1430 try:
1436 try:
1431 fctx = ctx[f]
1437 fctx = ctx[f]
1432 if fctx is None:
1438 if fctx is None:
1433 removed.append(f)
1439 removed.append(f)
1434 else:
1440 else:
1435 added.append(f)
1441 added.append(f)
1436 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1442 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1437 trp, changed)
1443 trp, changed)
1438 m.setflag(f, fctx.flags())
1444 m.setflag(f, fctx.flags())
1439 except OSError, inst:
1445 except OSError, inst:
1440 self.ui.warn(_("trouble committing %s!\n") % f)
1446 self.ui.warn(_("trouble committing %s!\n") % f)
1441 raise
1447 raise
1442 except IOError, inst:
1448 except IOError, inst:
1443 errcode = getattr(inst, 'errno', errno.ENOENT)
1449 errcode = getattr(inst, 'errno', errno.ENOENT)
1444 if error or errcode and errcode != errno.ENOENT:
1450 if error or errcode and errcode != errno.ENOENT:
1445 self.ui.warn(_("trouble committing %s!\n") % f)
1451 self.ui.warn(_("trouble committing %s!\n") % f)
1446 raise
1452 raise
1447
1453
1448 # update manifest
1454 # update manifest
1449 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1455 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1450 drop = [f for f in removed if f in m]
1456 drop = [f for f in removed if f in m]
1451 for f in drop:
1457 for f in drop:
1452 del m[f]
1458 del m[f]
1453 mn = self.manifest.add(m, trp, linkrev,
1459 mn = self.manifest.add(m, trp, linkrev,
1454 p1.manifestnode(), p2.manifestnode(),
1460 p1.manifestnode(), p2.manifestnode(),
1455 added, drop)
1461 added, drop)
1456 files = changed + removed
1462 files = changed + removed
1457 else:
1463 else:
1458 mn = p1.manifestnode()
1464 mn = p1.manifestnode()
1459 files = []
1465 files = []
1460
1466
1461 # update changelog
1467 # update changelog
1462 self.changelog.delayupdate(tr)
1468 self.changelog.delayupdate(tr)
1463 n = self.changelog.add(mn, files, ctx.description(),
1469 n = self.changelog.add(mn, files, ctx.description(),
1464 trp, p1.node(), p2.node(),
1470 trp, p1.node(), p2.node(),
1465 user, ctx.date(), ctx.extra().copy())
1471 user, ctx.date(), ctx.extra().copy())
1466 p = lambda: tr.writepending() and self.root or ""
1472 p = lambda: tr.writepending() and self.root or ""
1467 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1473 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1468 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1474 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1469 parent2=xp2, pending=p)
1475 parent2=xp2, pending=p)
1470 # set the new commit is proper phase
1476 # set the new commit is proper phase
1471 targetphase = subrepo.newcommitphase(self.ui, ctx)
1477 targetphase = subrepo.newcommitphase(self.ui, ctx)
1472 if targetphase:
1478 if targetphase:
1473 # retract boundary do not alter parent changeset.
1479 # retract boundary do not alter parent changeset.
1474 # if a parent have higher the resulting phase will
1480 # if a parent have higher the resulting phase will
1475 # be compliant anyway
1481 # be compliant anyway
1476 #
1482 #
1477 # if minimal phase was 0 we don't need to retract anything
1483 # if minimal phase was 0 we don't need to retract anything
1478 phases.retractboundary(self, tr, targetphase, [n])
1484 phases.retractboundary(self, tr, targetphase, [n])
1479 tr.close()
1485 tr.close()
1480 branchmap.updatecache(self.filtered('served'))
1486 branchmap.updatecache(self.filtered('served'))
1481 return n
1487 return n
1482 finally:
1488 finally:
1483 if tr:
1489 if tr:
1484 tr.release()
1490 tr.release()
1485 lock.release()
1491 lock.release()
1486
1492
1487 @unfilteredmethod
1493 @unfilteredmethod
1488 def destroying(self):
1494 def destroying(self):
1489 '''Inform the repository that nodes are about to be destroyed.
1495 '''Inform the repository that nodes are about to be destroyed.
1490 Intended for use by strip and rollback, so there's a common
1496 Intended for use by strip and rollback, so there's a common
1491 place for anything that has to be done before destroying history.
1497 place for anything that has to be done before destroying history.
1492
1498
1493 This is mostly useful for saving state that is in memory and waiting
1499 This is mostly useful for saving state that is in memory and waiting
1494 to be flushed when the current lock is released. Because a call to
1500 to be flushed when the current lock is released. Because a call to
1495 destroyed is imminent, the repo will be invalidated causing those
1501 destroyed is imminent, the repo will be invalidated causing those
1496 changes to stay in memory (waiting for the next unlock), or vanish
1502 changes to stay in memory (waiting for the next unlock), or vanish
1497 completely.
1503 completely.
1498 '''
1504 '''
1499 # When using the same lock to commit and strip, the phasecache is left
1505 # When using the same lock to commit and strip, the phasecache is left
1500 # dirty after committing. Then when we strip, the repo is invalidated,
1506 # dirty after committing. Then when we strip, the repo is invalidated,
1501 # causing those changes to disappear.
1507 # causing those changes to disappear.
1502 if '_phasecache' in vars(self):
1508 if '_phasecache' in vars(self):
1503 self._phasecache.write()
1509 self._phasecache.write()
1504
1510
1505 @unfilteredmethod
1511 @unfilteredmethod
1506 def destroyed(self):
1512 def destroyed(self):
1507 '''Inform the repository that nodes have been destroyed.
1513 '''Inform the repository that nodes have been destroyed.
1508 Intended for use by strip and rollback, so there's a common
1514 Intended for use by strip and rollback, so there's a common
1509 place for anything that has to be done after destroying history.
1515 place for anything that has to be done after destroying history.
1510 '''
1516 '''
1511 # When one tries to:
1517 # When one tries to:
1512 # 1) destroy nodes thus calling this method (e.g. strip)
1518 # 1) destroy nodes thus calling this method (e.g. strip)
1513 # 2) use phasecache somewhere (e.g. commit)
1519 # 2) use phasecache somewhere (e.g. commit)
1514 #
1520 #
1515 # then 2) will fail because the phasecache contains nodes that were
1521 # then 2) will fail because the phasecache contains nodes that were
1516 # removed. We can either remove phasecache from the filecache,
1522 # removed. We can either remove phasecache from the filecache,
1517 # causing it to reload next time it is accessed, or simply filter
1523 # causing it to reload next time it is accessed, or simply filter
1518 # the removed nodes now and write the updated cache.
1524 # the removed nodes now and write the updated cache.
1519 self._phasecache.filterunknown(self)
1525 self._phasecache.filterunknown(self)
1520 self._phasecache.write()
1526 self._phasecache.write()
1521
1527
1522 # update the 'served' branch cache to help read only server process
1528 # update the 'served' branch cache to help read only server process
1523 # Thanks to branchcache collaboration this is done from the nearest
1529 # Thanks to branchcache collaboration this is done from the nearest
1524 # filtered subset and it is expected to be fast.
1530 # filtered subset and it is expected to be fast.
1525 branchmap.updatecache(self.filtered('served'))
1531 branchmap.updatecache(self.filtered('served'))
1526
1532
1527 # Ensure the persistent tag cache is updated. Doing it now
1533 # Ensure the persistent tag cache is updated. Doing it now
1528 # means that the tag cache only has to worry about destroyed
1534 # means that the tag cache only has to worry about destroyed
1529 # heads immediately after a strip/rollback. That in turn
1535 # heads immediately after a strip/rollback. That in turn
1530 # guarantees that "cachetip == currenttip" (comparing both rev
1536 # guarantees that "cachetip == currenttip" (comparing both rev
1531 # and node) always means no nodes have been added or destroyed.
1537 # and node) always means no nodes have been added or destroyed.
1532
1538
1533 # XXX this is suboptimal when qrefresh'ing: we strip the current
1539 # XXX this is suboptimal when qrefresh'ing: we strip the current
1534 # head, refresh the tag cache, then immediately add a new head.
1540 # head, refresh the tag cache, then immediately add a new head.
1535 # But I think doing it this way is necessary for the "instant
1541 # But I think doing it this way is necessary for the "instant
1536 # tag cache retrieval" case to work.
1542 # tag cache retrieval" case to work.
1537 self.invalidate()
1543 self.invalidate()
1538
1544
1539 def walk(self, match, node=None):
1545 def walk(self, match, node=None):
1540 '''
1546 '''
1541 walk recursively through the directory tree or a given
1547 walk recursively through the directory tree or a given
1542 changeset, finding all files matched by the match
1548 changeset, finding all files matched by the match
1543 function
1549 function
1544 '''
1550 '''
1545 return self[node].walk(match)
1551 return self[node].walk(match)
1546
1552
1547 def status(self, node1='.', node2=None, match=None,
1553 def status(self, node1='.', node2=None, match=None,
1548 ignored=False, clean=False, unknown=False,
1554 ignored=False, clean=False, unknown=False,
1549 listsubrepos=False):
1555 listsubrepos=False):
1550 '''a convenience method that calls node1.status(node2)'''
1556 '''a convenience method that calls node1.status(node2)'''
1551 return self[node1].status(node2, match, ignored, clean, unknown,
1557 return self[node1].status(node2, match, ignored, clean, unknown,
1552 listsubrepos)
1558 listsubrepos)
1553
1559
1554 def heads(self, start=None):
1560 def heads(self, start=None):
1555 heads = self.changelog.heads(start)
1561 heads = self.changelog.heads(start)
1556 # sort the output in rev descending order
1562 # sort the output in rev descending order
1557 return sorted(heads, key=self.changelog.rev, reverse=True)
1563 return sorted(heads, key=self.changelog.rev, reverse=True)
1558
1564
1559 def branchheads(self, branch=None, start=None, closed=False):
1565 def branchheads(self, branch=None, start=None, closed=False):
1560 '''return a (possibly filtered) list of heads for the given branch
1566 '''return a (possibly filtered) list of heads for the given branch
1561
1567
1562 Heads are returned in topological order, from newest to oldest.
1568 Heads are returned in topological order, from newest to oldest.
1563 If branch is None, use the dirstate branch.
1569 If branch is None, use the dirstate branch.
1564 If start is not None, return only heads reachable from start.
1570 If start is not None, return only heads reachable from start.
1565 If closed is True, return heads that are marked as closed as well.
1571 If closed is True, return heads that are marked as closed as well.
1566 '''
1572 '''
1567 if branch is None:
1573 if branch is None:
1568 branch = self[None].branch()
1574 branch = self[None].branch()
1569 branches = self.branchmap()
1575 branches = self.branchmap()
1570 if branch not in branches:
1576 if branch not in branches:
1571 return []
1577 return []
1572 # the cache returns heads ordered lowest to highest
1578 # the cache returns heads ordered lowest to highest
1573 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1579 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1574 if start is not None:
1580 if start is not None:
1575 # filter out the heads that cannot be reached from startrev
1581 # filter out the heads that cannot be reached from startrev
1576 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1582 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1577 bheads = [h for h in bheads if h in fbheads]
1583 bheads = [h for h in bheads if h in fbheads]
1578 return bheads
1584 return bheads
1579
1585
1580 def branches(self, nodes):
1586 def branches(self, nodes):
1581 if not nodes:
1587 if not nodes:
1582 nodes = [self.changelog.tip()]
1588 nodes = [self.changelog.tip()]
1583 b = []
1589 b = []
1584 for n in nodes:
1590 for n in nodes:
1585 t = n
1591 t = n
1586 while True:
1592 while True:
1587 p = self.changelog.parents(n)
1593 p = self.changelog.parents(n)
1588 if p[1] != nullid or p[0] == nullid:
1594 if p[1] != nullid or p[0] == nullid:
1589 b.append((t, n, p[0], p[1]))
1595 b.append((t, n, p[0], p[1]))
1590 break
1596 break
1591 n = p[0]
1597 n = p[0]
1592 return b
1598 return b
1593
1599
1594 def between(self, pairs):
1600 def between(self, pairs):
1595 r = []
1601 r = []
1596
1602
1597 for top, bottom in pairs:
1603 for top, bottom in pairs:
1598 n, l, i = top, [], 0
1604 n, l, i = top, [], 0
1599 f = 1
1605 f = 1
1600
1606
1601 while n != bottom and n != nullid:
1607 while n != bottom and n != nullid:
1602 p = self.changelog.parents(n)[0]
1608 p = self.changelog.parents(n)[0]
1603 if i == f:
1609 if i == f:
1604 l.append(n)
1610 l.append(n)
1605 f = f * 2
1611 f = f * 2
1606 n = p
1612 n = p
1607 i += 1
1613 i += 1
1608
1614
1609 r.append(l)
1615 r.append(l)
1610
1616
1611 return r
1617 return r
1612
1618
1613 def checkpush(self, pushop):
1619 def checkpush(self, pushop):
1614 """Extensions can override this function if additional checks have
1620 """Extensions can override this function if additional checks have
1615 to be performed before pushing, or call it if they override push
1621 to be performed before pushing, or call it if they override push
1616 command.
1622 command.
1617 """
1623 """
1618 pass
1624 pass
1619
1625
1620 @unfilteredpropertycache
1626 @unfilteredpropertycache
1621 def prepushoutgoinghooks(self):
1627 def prepushoutgoinghooks(self):
1622 """Return util.hooks consists of "(repo, remote, outgoing)"
1628 """Return util.hooks consists of "(repo, remote, outgoing)"
1623 functions, which are called before pushing changesets.
1629 functions, which are called before pushing changesets.
1624 """
1630 """
1625 return util.hooks()
1631 return util.hooks()
1626
1632
1627 def stream_in(self, remote, requirements):
1633 def stream_in(self, remote, requirements):
1628 lock = self.lock()
1634 lock = self.lock()
1629 try:
1635 try:
1630 # Save remote branchmap. We will use it later
1636 # Save remote branchmap. We will use it later
1631 # to speed up branchcache creation
1637 # to speed up branchcache creation
1632 rbranchmap = None
1638 rbranchmap = None
1633 if remote.capable("branchmap"):
1639 if remote.capable("branchmap"):
1634 rbranchmap = remote.branchmap()
1640 rbranchmap = remote.branchmap()
1635
1641
1636 fp = remote.stream_out()
1642 fp = remote.stream_out()
1637 l = fp.readline()
1643 l = fp.readline()
1638 try:
1644 try:
1639 resp = int(l)
1645 resp = int(l)
1640 except ValueError:
1646 except ValueError:
1641 raise error.ResponseError(
1647 raise error.ResponseError(
1642 _('unexpected response from remote server:'), l)
1648 _('unexpected response from remote server:'), l)
1643 if resp == 1:
1649 if resp == 1:
1644 raise util.Abort(_('operation forbidden by server'))
1650 raise util.Abort(_('operation forbidden by server'))
1645 elif resp == 2:
1651 elif resp == 2:
1646 raise util.Abort(_('locking the remote repository failed'))
1652 raise util.Abort(_('locking the remote repository failed'))
1647 elif resp != 0:
1653 elif resp != 0:
1648 raise util.Abort(_('the server sent an unknown error code'))
1654 raise util.Abort(_('the server sent an unknown error code'))
1649 self.ui.status(_('streaming all changes\n'))
1655 self.ui.status(_('streaming all changes\n'))
1650 l = fp.readline()
1656 l = fp.readline()
1651 try:
1657 try:
1652 total_files, total_bytes = map(int, l.split(' ', 1))
1658 total_files, total_bytes = map(int, l.split(' ', 1))
1653 except (ValueError, TypeError):
1659 except (ValueError, TypeError):
1654 raise error.ResponseError(
1660 raise error.ResponseError(
1655 _('unexpected response from remote server:'), l)
1661 _('unexpected response from remote server:'), l)
1656 self.ui.status(_('%d files to transfer, %s of data\n') %
1662 self.ui.status(_('%d files to transfer, %s of data\n') %
1657 (total_files, util.bytecount(total_bytes)))
1663 (total_files, util.bytecount(total_bytes)))
1658 handled_bytes = 0
1664 handled_bytes = 0
1659 self.ui.progress(_('clone'), 0, total=total_bytes)
1665 self.ui.progress(_('clone'), 0, total=total_bytes)
1660 start = time.time()
1666 start = time.time()
1661
1667
1662 tr = self.transaction(_('clone'))
1668 tr = self.transaction(_('clone'))
1663 try:
1669 try:
1664 for i in xrange(total_files):
1670 for i in xrange(total_files):
1665 # XXX doesn't support '\n' or '\r' in filenames
1671 # XXX doesn't support '\n' or '\r' in filenames
1666 l = fp.readline()
1672 l = fp.readline()
1667 try:
1673 try:
1668 name, size = l.split('\0', 1)
1674 name, size = l.split('\0', 1)
1669 size = int(size)
1675 size = int(size)
1670 except (ValueError, TypeError):
1676 except (ValueError, TypeError):
1671 raise error.ResponseError(
1677 raise error.ResponseError(
1672 _('unexpected response from remote server:'), l)
1678 _('unexpected response from remote server:'), l)
1673 if self.ui.debugflag:
1679 if self.ui.debugflag:
1674 self.ui.debug('adding %s (%s)\n' %
1680 self.ui.debug('adding %s (%s)\n' %
1675 (name, util.bytecount(size)))
1681 (name, util.bytecount(size)))
1676 # for backwards compat, name was partially encoded
1682 # for backwards compat, name was partially encoded
1677 ofp = self.sopener(store.decodedir(name), 'w')
1683 ofp = self.sopener(store.decodedir(name), 'w')
1678 for chunk in util.filechunkiter(fp, limit=size):
1684 for chunk in util.filechunkiter(fp, limit=size):
1679 handled_bytes += len(chunk)
1685 handled_bytes += len(chunk)
1680 self.ui.progress(_('clone'), handled_bytes,
1686 self.ui.progress(_('clone'), handled_bytes,
1681 total=total_bytes)
1687 total=total_bytes)
1682 ofp.write(chunk)
1688 ofp.write(chunk)
1683 ofp.close()
1689 ofp.close()
1684 tr.close()
1690 tr.close()
1685 finally:
1691 finally:
1686 tr.release()
1692 tr.release()
1687
1693
1688 # Writing straight to files circumvented the inmemory caches
1694 # Writing straight to files circumvented the inmemory caches
1689 self.invalidate()
1695 self.invalidate()
1690
1696
1691 elapsed = time.time() - start
1697 elapsed = time.time() - start
1692 if elapsed <= 0:
1698 if elapsed <= 0:
1693 elapsed = 0.001
1699 elapsed = 0.001
1694 self.ui.progress(_('clone'), None)
1700 self.ui.progress(_('clone'), None)
1695 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1701 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1696 (util.bytecount(total_bytes), elapsed,
1702 (util.bytecount(total_bytes), elapsed,
1697 util.bytecount(total_bytes / elapsed)))
1703 util.bytecount(total_bytes / elapsed)))
1698
1704
1699 # new requirements = old non-format requirements +
1705 # new requirements = old non-format requirements +
1700 # new format-related
1706 # new format-related
1701 # requirements from the streamed-in repository
1707 # requirements from the streamed-in repository
1702 requirements.update(set(self.requirements) - self.supportedformats)
1708 requirements.update(set(self.requirements) - self.supportedformats)
1703 self._applyrequirements(requirements)
1709 self._applyrequirements(requirements)
1704 self._writerequirements()
1710 self._writerequirements()
1705
1711
1706 if rbranchmap:
1712 if rbranchmap:
1707 rbheads = []
1713 rbheads = []
1708 closed = []
1714 closed = []
1709 for bheads in rbranchmap.itervalues():
1715 for bheads in rbranchmap.itervalues():
1710 rbheads.extend(bheads)
1716 rbheads.extend(bheads)
1711 for h in bheads:
1717 for h in bheads:
1712 r = self.changelog.rev(h)
1718 r = self.changelog.rev(h)
1713 b, c = self.changelog.branchinfo(r)
1719 b, c = self.changelog.branchinfo(r)
1714 if c:
1720 if c:
1715 closed.append(h)
1721 closed.append(h)
1716
1722
1717 if rbheads:
1723 if rbheads:
1718 rtiprev = max((int(self.changelog.rev(node))
1724 rtiprev = max((int(self.changelog.rev(node))
1719 for node in rbheads))
1725 for node in rbheads))
1720 cache = branchmap.branchcache(rbranchmap,
1726 cache = branchmap.branchcache(rbranchmap,
1721 self[rtiprev].node(),
1727 self[rtiprev].node(),
1722 rtiprev,
1728 rtiprev,
1723 closednodes=closed)
1729 closednodes=closed)
1724 # Try to stick it as low as possible
1730 # Try to stick it as low as possible
1725 # filter above served are unlikely to be fetch from a clone
1731 # filter above served are unlikely to be fetch from a clone
1726 for candidate in ('base', 'immutable', 'served'):
1732 for candidate in ('base', 'immutable', 'served'):
1727 rview = self.filtered(candidate)
1733 rview = self.filtered(candidate)
1728 if cache.validfor(rview):
1734 if cache.validfor(rview):
1729 self._branchcaches[candidate] = cache
1735 self._branchcaches[candidate] = cache
1730 cache.write(rview)
1736 cache.write(rview)
1731 break
1737 break
1732 self.invalidate()
1738 self.invalidate()
1733 return len(self.heads()) + 1
1739 return len(self.heads()) + 1
1734 finally:
1740 finally:
1735 lock.release()
1741 lock.release()
1736
1742
1737 def clone(self, remote, heads=[], stream=None):
1743 def clone(self, remote, heads=[], stream=None):
1738 '''clone remote repository.
1744 '''clone remote repository.
1739
1745
1740 keyword arguments:
1746 keyword arguments:
1741 heads: list of revs to clone (forces use of pull)
1747 heads: list of revs to clone (forces use of pull)
1742 stream: use streaming clone if possible'''
1748 stream: use streaming clone if possible'''
1743
1749
1744 # now, all clients that can request uncompressed clones can
1750 # now, all clients that can request uncompressed clones can
1745 # read repo formats supported by all servers that can serve
1751 # read repo formats supported by all servers that can serve
1746 # them.
1752 # them.
1747
1753
1748 # if revlog format changes, client will have to check version
1754 # if revlog format changes, client will have to check version
1749 # and format flags on "stream" capability, and use
1755 # and format flags on "stream" capability, and use
1750 # uncompressed only if compatible.
1756 # uncompressed only if compatible.
1751
1757
1752 if stream is None:
1758 if stream is None:
1753 # if the server explicitly prefers to stream (for fast LANs)
1759 # if the server explicitly prefers to stream (for fast LANs)
1754 stream = remote.capable('stream-preferred')
1760 stream = remote.capable('stream-preferred')
1755
1761
1756 if stream and not heads:
1762 if stream and not heads:
1757 # 'stream' means remote revlog format is revlogv1 only
1763 # 'stream' means remote revlog format is revlogv1 only
1758 if remote.capable('stream'):
1764 if remote.capable('stream'):
1759 self.stream_in(remote, set(('revlogv1',)))
1765 self.stream_in(remote, set(('revlogv1',)))
1760 else:
1766 else:
1761 # otherwise, 'streamreqs' contains the remote revlog format
1767 # otherwise, 'streamreqs' contains the remote revlog format
1762 streamreqs = remote.capable('streamreqs')
1768 streamreqs = remote.capable('streamreqs')
1763 if streamreqs:
1769 if streamreqs:
1764 streamreqs = set(streamreqs.split(','))
1770 streamreqs = set(streamreqs.split(','))
1765 # if we support it, stream in and adjust our requirements
1771 # if we support it, stream in and adjust our requirements
1766 if not streamreqs - self.supportedformats:
1772 if not streamreqs - self.supportedformats:
1767 self.stream_in(remote, streamreqs)
1773 self.stream_in(remote, streamreqs)
1768
1774
1769 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1775 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1770 try:
1776 try:
1771 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1777 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1772 ret = exchange.pull(self, remote, heads).cgresult
1778 ret = exchange.pull(self, remote, heads).cgresult
1773 finally:
1779 finally:
1774 self.ui.restoreconfig(quiet)
1780 self.ui.restoreconfig(quiet)
1775 return ret
1781 return ret
1776
1782
1777 def pushkey(self, namespace, key, old, new):
1783 def pushkey(self, namespace, key, old, new):
1778 try:
1784 try:
1779 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1785 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1780 old=old, new=new)
1786 old=old, new=new)
1781 except error.HookAbort, exc:
1787 except error.HookAbort, exc:
1782 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1788 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1783 if exc.hint:
1789 if exc.hint:
1784 self.ui.write_err(_("(%s)\n") % exc.hint)
1790 self.ui.write_err(_("(%s)\n") % exc.hint)
1785 return False
1791 return False
1786 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1792 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1787 ret = pushkey.push(self, namespace, key, old, new)
1793 ret = pushkey.push(self, namespace, key, old, new)
1788 def runhook():
1794 def runhook():
1789 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1795 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1790 ret=ret)
1796 ret=ret)
1791 self._afterlock(runhook)
1797 self._afterlock(runhook)
1792 return ret
1798 return ret
1793
1799
1794 def listkeys(self, namespace):
1800 def listkeys(self, namespace):
1795 self.hook('prelistkeys', throw=True, namespace=namespace)
1801 self.hook('prelistkeys', throw=True, namespace=namespace)
1796 self.ui.debug('listing keys for "%s"\n' % namespace)
1802 self.ui.debug('listing keys for "%s"\n' % namespace)
1797 values = pushkey.list(self, namespace)
1803 values = pushkey.list(self, namespace)
1798 self.hook('listkeys', namespace=namespace, values=values)
1804 self.hook('listkeys', namespace=namespace, values=values)
1799 return values
1805 return values
1800
1806
1801 def debugwireargs(self, one, two, three=None, four=None, five=None):
1807 def debugwireargs(self, one, two, three=None, four=None, five=None):
1802 '''used to test argument passing over the wire'''
1808 '''used to test argument passing over the wire'''
1803 return "%s %s %s %s %s" % (one, two, three, four, five)
1809 return "%s %s %s %s %s" % (one, two, three, four, five)
1804
1810
1805 def savecommitmessage(self, text):
1811 def savecommitmessage(self, text):
1806 fp = self.opener('last-message.txt', 'wb')
1812 fp = self.opener('last-message.txt', 'wb')
1807 try:
1813 try:
1808 fp.write(text)
1814 fp.write(text)
1809 finally:
1815 finally:
1810 fp.close()
1816 fp.close()
1811 return self.pathto(fp.name[len(self.root) + 1:])
1817 return self.pathto(fp.name[len(self.root) + 1:])
1812
1818
1813 # used to avoid circular references so destructors work
1819 # used to avoid circular references so destructors work
1814 def aftertrans(files):
1820 def aftertrans(files):
1815 renamefiles = [tuple(t) for t in files]
1821 renamefiles = [tuple(t) for t in files]
1816 def a():
1822 def a():
1817 for vfs, src, dest in renamefiles:
1823 for vfs, src, dest in renamefiles:
1818 try:
1824 try:
1819 vfs.rename(src, dest)
1825 vfs.rename(src, dest)
1820 except OSError: # journal file does not yet exist
1826 except OSError: # journal file does not yet exist
1821 pass
1827 pass
1822 return a
1828 return a
1823
1829
1824 def undoname(fn):
1830 def undoname(fn):
1825 base, name = os.path.split(fn)
1831 base, name = os.path.split(fn)
1826 assert name.startswith('journal')
1832 assert name.startswith('journal')
1827 return os.path.join(base, name.replace('journal', 'undo', 1))
1833 return os.path.join(base, name.replace('journal', 'undo', 1))
1828
1834
1829 def instance(ui, path, create):
1835 def instance(ui, path, create):
1830 return localrepository(ui, util.urllocalpath(path), create)
1836 return localrepository(ui, util.urllocalpath(path), create)
1831
1837
1832 def islocal(path):
1838 def islocal(path):
1833 return True
1839 return True
@@ -1,1676 +1,1676 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import copy
8 import copy
9 import errno, os, re, shutil, posixpath, sys
9 import errno, os, re, shutil, posixpath, sys
10 import xml.dom.minidom
10 import xml.dom.minidom
11 import stat, subprocess, tarfile
11 import stat, subprocess, tarfile
12 from i18n import _
12 from i18n import _
13 import config, util, node, error, cmdutil, scmutil, match as matchmod
13 import config, util, node, error, cmdutil, scmutil, match as matchmod
14 import phases
14 import phases
15 import pathutil
15 import pathutil
16 import exchange
16 import exchange
17 hg = None
17 hg = None
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 nullstate = ('', '', 'empty')
20 nullstate = ('', '', 'empty')
21
21
22 def _expandedabspath(path):
22 def _expandedabspath(path):
23 '''
23 '''
24 get a path or url and if it is a path expand it and return an absolute path
24 get a path or url and if it is a path expand it and return an absolute path
25 '''
25 '''
26 expandedpath = util.urllocalpath(util.expandpath(path))
26 expandedpath = util.urllocalpath(util.expandpath(path))
27 u = util.url(expandedpath)
27 u = util.url(expandedpath)
28 if not u.scheme:
28 if not u.scheme:
29 path = util.normpath(os.path.abspath(u.path))
29 path = util.normpath(os.path.abspath(u.path))
30 return path
30 return path
31
31
32 def _getstorehashcachename(remotepath):
32 def _getstorehashcachename(remotepath):
33 '''get a unique filename for the store hash cache of a remote repository'''
33 '''get a unique filename for the store hash cache of a remote repository'''
34 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
34 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
35
35
36 class SubrepoAbort(error.Abort):
36 class SubrepoAbort(error.Abort):
37 """Exception class used to avoid handling a subrepo error more than once"""
37 """Exception class used to avoid handling a subrepo error more than once"""
38 def __init__(self, *args, **kw):
38 def __init__(self, *args, **kw):
39 error.Abort.__init__(self, *args, **kw)
39 error.Abort.__init__(self, *args, **kw)
40 self.subrepo = kw.get('subrepo')
40 self.subrepo = kw.get('subrepo')
41 self.cause = kw.get('cause')
41 self.cause = kw.get('cause')
42
42
43 def annotatesubrepoerror(func):
43 def annotatesubrepoerror(func):
44 def decoratedmethod(self, *args, **kargs):
44 def decoratedmethod(self, *args, **kargs):
45 try:
45 try:
46 res = func(self, *args, **kargs)
46 res = func(self, *args, **kargs)
47 except SubrepoAbort, ex:
47 except SubrepoAbort, ex:
48 # This exception has already been handled
48 # This exception has already been handled
49 raise ex
49 raise ex
50 except error.Abort, ex:
50 except error.Abort, ex:
51 subrepo = subrelpath(self)
51 subrepo = subrelpath(self)
52 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
52 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
53 # avoid handling this exception by raising a SubrepoAbort exception
53 # avoid handling this exception by raising a SubrepoAbort exception
54 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
54 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
55 cause=sys.exc_info())
55 cause=sys.exc_info())
56 return res
56 return res
57 return decoratedmethod
57 return decoratedmethod
58
58
59 def state(ctx, ui):
59 def state(ctx, ui):
60 """return a state dict, mapping subrepo paths configured in .hgsub
60 """return a state dict, mapping subrepo paths configured in .hgsub
61 to tuple: (source from .hgsub, revision from .hgsubstate, kind
61 to tuple: (source from .hgsub, revision from .hgsubstate, kind
62 (key in types dict))
62 (key in types dict))
63 """
63 """
64 p = config.config()
64 p = config.config()
65 def read(f, sections=None, remap=None):
65 def read(f, sections=None, remap=None):
66 if f in ctx:
66 if f in ctx:
67 try:
67 try:
68 data = ctx[f].data()
68 data = ctx[f].data()
69 except IOError, err:
69 except IOError, err:
70 if err.errno != errno.ENOENT:
70 if err.errno != errno.ENOENT:
71 raise
71 raise
72 # handle missing subrepo spec files as removed
72 # handle missing subrepo spec files as removed
73 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
73 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
74 return
74 return
75 p.parse(f, data, sections, remap, read)
75 p.parse(f, data, sections, remap, read)
76 else:
76 else:
77 raise util.Abort(_("subrepo spec file %s not found") % f)
77 raise util.Abort(_("subrepo spec file %s not found") % f)
78
78
79 if '.hgsub' in ctx:
79 if '.hgsub' in ctx:
80 read('.hgsub')
80 read('.hgsub')
81
81
82 for path, src in ui.configitems('subpaths'):
82 for path, src in ui.configitems('subpaths'):
83 p.set('subpaths', path, src, ui.configsource('subpaths', path))
83 p.set('subpaths', path, src, ui.configsource('subpaths', path))
84
84
85 rev = {}
85 rev = {}
86 if '.hgsubstate' in ctx:
86 if '.hgsubstate' in ctx:
87 try:
87 try:
88 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
88 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
89 l = l.lstrip()
89 l = l.lstrip()
90 if not l:
90 if not l:
91 continue
91 continue
92 try:
92 try:
93 revision, path = l.split(" ", 1)
93 revision, path = l.split(" ", 1)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_("invalid subrepository revision "
95 raise util.Abort(_("invalid subrepository revision "
96 "specifier in .hgsubstate line %d")
96 "specifier in .hgsubstate line %d")
97 % (i + 1))
97 % (i + 1))
98 rev[path] = revision
98 rev[path] = revision
99 except IOError, err:
99 except IOError, err:
100 if err.errno != errno.ENOENT:
100 if err.errno != errno.ENOENT:
101 raise
101 raise
102
102
103 def remap(src):
103 def remap(src):
104 for pattern, repl in p.items('subpaths'):
104 for pattern, repl in p.items('subpaths'):
105 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
105 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
106 # does a string decode.
106 # does a string decode.
107 repl = repl.encode('string-escape')
107 repl = repl.encode('string-escape')
108 # However, we still want to allow back references to go
108 # However, we still want to allow back references to go
109 # through unharmed, so we turn r'\\1' into r'\1'. Again,
109 # through unharmed, so we turn r'\\1' into r'\1'. Again,
110 # extra escapes are needed because re.sub string decodes.
110 # extra escapes are needed because re.sub string decodes.
111 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
111 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
112 try:
112 try:
113 src = re.sub(pattern, repl, src, 1)
113 src = re.sub(pattern, repl, src, 1)
114 except re.error, e:
114 except re.error, e:
115 raise util.Abort(_("bad subrepository pattern in %s: %s")
115 raise util.Abort(_("bad subrepository pattern in %s: %s")
116 % (p.source('subpaths', pattern), e))
116 % (p.source('subpaths', pattern), e))
117 return src
117 return src
118
118
119 state = {}
119 state = {}
120 for path, src in p[''].items():
120 for path, src in p[''].items():
121 kind = 'hg'
121 kind = 'hg'
122 if src.startswith('['):
122 if src.startswith('['):
123 if ']' not in src:
123 if ']' not in src:
124 raise util.Abort(_('missing ] in subrepo source'))
124 raise util.Abort(_('missing ] in subrepo source'))
125 kind, src = src.split(']', 1)
125 kind, src = src.split(']', 1)
126 kind = kind[1:]
126 kind = kind[1:]
127 src = src.lstrip() # strip any extra whitespace after ']'
127 src = src.lstrip() # strip any extra whitespace after ']'
128
128
129 if not util.url(src).isabs():
129 if not util.url(src).isabs():
130 parent = _abssource(ctx._repo, abort=False)
130 parent = _abssource(ctx._repo, abort=False)
131 if parent:
131 if parent:
132 parent = util.url(parent)
132 parent = util.url(parent)
133 parent.path = posixpath.join(parent.path or '', src)
133 parent.path = posixpath.join(parent.path or '', src)
134 parent.path = posixpath.normpath(parent.path)
134 parent.path = posixpath.normpath(parent.path)
135 joined = str(parent)
135 joined = str(parent)
136 # Remap the full joined path and use it if it changes,
136 # Remap the full joined path and use it if it changes,
137 # else remap the original source.
137 # else remap the original source.
138 remapped = remap(joined)
138 remapped = remap(joined)
139 if remapped == joined:
139 if remapped == joined:
140 src = remap(src)
140 src = remap(src)
141 else:
141 else:
142 src = remapped
142 src = remapped
143
143
144 src = remap(src)
144 src = remap(src)
145 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
145 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
146
146
147 return state
147 return state
148
148
149 def writestate(repo, state):
149 def writestate(repo, state):
150 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
150 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
151 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
151 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
152 repo.wwrite('.hgsubstate', ''.join(lines), '')
152 repo.wwrite('.hgsubstate', ''.join(lines), '')
153
153
154 def submerge(repo, wctx, mctx, actx, overwrite):
154 def submerge(repo, wctx, mctx, actx, overwrite):
155 """delegated from merge.applyupdates: merging of .hgsubstate file
155 """delegated from merge.applyupdates: merging of .hgsubstate file
156 in working context, merging context and ancestor context"""
156 in working context, merging context and ancestor context"""
157 if mctx == actx: # backwards?
157 if mctx == actx: # backwards?
158 actx = wctx.p1()
158 actx = wctx.p1()
159 s1 = wctx.substate
159 s1 = wctx.substate
160 s2 = mctx.substate
160 s2 = mctx.substate
161 sa = actx.substate
161 sa = actx.substate
162 sm = {}
162 sm = {}
163
163
164 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
164 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
165
165
166 def debug(s, msg, r=""):
166 def debug(s, msg, r=""):
167 if r:
167 if r:
168 r = "%s:%s:%s" % r
168 r = "%s:%s:%s" % r
169 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
169 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
170
170
171 for s, l in sorted(s1.iteritems()):
171 for s, l in sorted(s1.iteritems()):
172 a = sa.get(s, nullstate)
172 a = sa.get(s, nullstate)
173 ld = l # local state with possible dirty flag for compares
173 ld = l # local state with possible dirty flag for compares
174 if wctx.sub(s).dirty():
174 if wctx.sub(s).dirty():
175 ld = (l[0], l[1] + "+")
175 ld = (l[0], l[1] + "+")
176 if wctx == actx: # overwrite
176 if wctx == actx: # overwrite
177 a = ld
177 a = ld
178
178
179 if s in s2:
179 if s in s2:
180 r = s2[s]
180 r = s2[s]
181 if ld == r or r == a: # no change or local is newer
181 if ld == r or r == a: # no change or local is newer
182 sm[s] = l
182 sm[s] = l
183 continue
183 continue
184 elif ld == a: # other side changed
184 elif ld == a: # other side changed
185 debug(s, "other changed, get", r)
185 debug(s, "other changed, get", r)
186 wctx.sub(s).get(r, overwrite)
186 wctx.sub(s).get(r, overwrite)
187 sm[s] = r
187 sm[s] = r
188 elif ld[0] != r[0]: # sources differ
188 elif ld[0] != r[0]: # sources differ
189 if repo.ui.promptchoice(
189 if repo.ui.promptchoice(
190 _(' subrepository sources for %s differ\n'
190 _(' subrepository sources for %s differ\n'
191 'use (l)ocal source (%s) or (r)emote source (%s)?'
191 'use (l)ocal source (%s) or (r)emote source (%s)?'
192 '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
192 '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
193 debug(s, "prompt changed, get", r)
193 debug(s, "prompt changed, get", r)
194 wctx.sub(s).get(r, overwrite)
194 wctx.sub(s).get(r, overwrite)
195 sm[s] = r
195 sm[s] = r
196 elif ld[1] == a[1]: # local side is unchanged
196 elif ld[1] == a[1]: # local side is unchanged
197 debug(s, "other side changed, get", r)
197 debug(s, "other side changed, get", r)
198 wctx.sub(s).get(r, overwrite)
198 wctx.sub(s).get(r, overwrite)
199 sm[s] = r
199 sm[s] = r
200 else:
200 else:
201 debug(s, "both sides changed")
201 debug(s, "both sides changed")
202 srepo = wctx.sub(s)
202 srepo = wctx.sub(s)
203 option = repo.ui.promptchoice(
203 option = repo.ui.promptchoice(
204 _(' subrepository %s diverged (local revision: %s, '
204 _(' subrepository %s diverged (local revision: %s, '
205 'remote revision: %s)\n'
205 'remote revision: %s)\n'
206 '(M)erge, keep (l)ocal or keep (r)emote?'
206 '(M)erge, keep (l)ocal or keep (r)emote?'
207 '$$ &Merge $$ &Local $$ &Remote')
207 '$$ &Merge $$ &Local $$ &Remote')
208 % (s, srepo.shortid(l[1]), srepo.shortid(r[1])), 0)
208 % (s, srepo.shortid(l[1]), srepo.shortid(r[1])), 0)
209 if option == 0:
209 if option == 0:
210 wctx.sub(s).merge(r)
210 wctx.sub(s).merge(r)
211 sm[s] = l
211 sm[s] = l
212 debug(s, "merge with", r)
212 debug(s, "merge with", r)
213 elif option == 1:
213 elif option == 1:
214 sm[s] = l
214 sm[s] = l
215 debug(s, "keep local subrepo revision", l)
215 debug(s, "keep local subrepo revision", l)
216 else:
216 else:
217 wctx.sub(s).get(r, overwrite)
217 wctx.sub(s).get(r, overwrite)
218 sm[s] = r
218 sm[s] = r
219 debug(s, "get remote subrepo revision", r)
219 debug(s, "get remote subrepo revision", r)
220 elif ld == a: # remote removed, local unchanged
220 elif ld == a: # remote removed, local unchanged
221 debug(s, "remote removed, remove")
221 debug(s, "remote removed, remove")
222 wctx.sub(s).remove()
222 wctx.sub(s).remove()
223 elif a == nullstate: # not present in remote or ancestor
223 elif a == nullstate: # not present in remote or ancestor
224 debug(s, "local added, keep")
224 debug(s, "local added, keep")
225 sm[s] = l
225 sm[s] = l
226 continue
226 continue
227 else:
227 else:
228 if repo.ui.promptchoice(
228 if repo.ui.promptchoice(
229 _(' local changed subrepository %s which remote removed\n'
229 _(' local changed subrepository %s which remote removed\n'
230 'use (c)hanged version or (d)elete?'
230 'use (c)hanged version or (d)elete?'
231 '$$ &Changed $$ &Delete') % s, 0):
231 '$$ &Changed $$ &Delete') % s, 0):
232 debug(s, "prompt remove")
232 debug(s, "prompt remove")
233 wctx.sub(s).remove()
233 wctx.sub(s).remove()
234
234
235 for s, r in sorted(s2.items()):
235 for s, r in sorted(s2.items()):
236 if s in s1:
236 if s in s1:
237 continue
237 continue
238 elif s not in sa:
238 elif s not in sa:
239 debug(s, "remote added, get", r)
239 debug(s, "remote added, get", r)
240 mctx.sub(s).get(r)
240 mctx.sub(s).get(r)
241 sm[s] = r
241 sm[s] = r
242 elif r != sa[s]:
242 elif r != sa[s]:
243 if repo.ui.promptchoice(
243 if repo.ui.promptchoice(
244 _(' remote changed subrepository %s which local removed\n'
244 _(' remote changed subrepository %s which local removed\n'
245 'use (c)hanged version or (d)elete?'
245 'use (c)hanged version or (d)elete?'
246 '$$ &Changed $$ &Delete') % s, 0) == 0:
246 '$$ &Changed $$ &Delete') % s, 0) == 0:
247 debug(s, "prompt recreate", r)
247 debug(s, "prompt recreate", r)
248 wctx.sub(s).get(r)
248 wctx.sub(s).get(r)
249 sm[s] = r
249 sm[s] = r
250
250
251 # record merged .hgsubstate
251 # record merged .hgsubstate
252 writestate(repo, sm)
252 writestate(repo, sm)
253 return sm
253 return sm
254
254
255 def _updateprompt(ui, sub, dirty, local, remote):
255 def _updateprompt(ui, sub, dirty, local, remote):
256 if dirty:
256 if dirty:
257 msg = (_(' subrepository sources for %s differ\n'
257 msg = (_(' subrepository sources for %s differ\n'
258 'use (l)ocal source (%s) or (r)emote source (%s)?'
258 'use (l)ocal source (%s) or (r)emote source (%s)?'
259 '$$ &Local $$ &Remote')
259 '$$ &Local $$ &Remote')
260 % (subrelpath(sub), local, remote))
260 % (subrelpath(sub), local, remote))
261 else:
261 else:
262 msg = (_(' subrepository sources for %s differ (in checked out '
262 msg = (_(' subrepository sources for %s differ (in checked out '
263 'version)\n'
263 'version)\n'
264 'use (l)ocal source (%s) or (r)emote source (%s)?'
264 'use (l)ocal source (%s) or (r)emote source (%s)?'
265 '$$ &Local $$ &Remote')
265 '$$ &Local $$ &Remote')
266 % (subrelpath(sub), local, remote))
266 % (subrelpath(sub), local, remote))
267 return ui.promptchoice(msg, 0)
267 return ui.promptchoice(msg, 0)
268
268
269 def reporelpath(repo):
269 def reporelpath(repo):
270 """return path to this (sub)repo as seen from outermost repo"""
270 """return path to this (sub)repo as seen from outermost repo"""
271 parent = repo
271 parent = repo
272 while util.safehasattr(parent, '_subparent'):
272 while util.safehasattr(parent, '_subparent'):
273 parent = parent._subparent
273 parent = parent._subparent
274 return repo.root[len(pathutil.normasprefix(parent.root)):]
274 return repo.root[len(pathutil.normasprefix(parent.root)):]
275
275
276 def subrelpath(sub):
276 def subrelpath(sub):
277 """return path to this subrepo as seen from outermost repo"""
277 """return path to this subrepo as seen from outermost repo"""
278 if util.safehasattr(sub, '_relpath'):
278 if util.safehasattr(sub, '_relpath'):
279 return sub._relpath
279 return sub._relpath
280 if not util.safehasattr(sub, '_repo'):
280 if not util.safehasattr(sub, '_repo'):
281 return sub._path
281 return sub._path
282 return reporelpath(sub._repo)
282 return reporelpath(sub._repo)
283
283
284 def _abssource(repo, push=False, abort=True):
284 def _abssource(repo, push=False, abort=True):
285 """return pull/push path of repo - either based on parent repo .hgsub info
285 """return pull/push path of repo - either based on parent repo .hgsub info
286 or on the top repo config. Abort or return None if no source found."""
286 or on the top repo config. Abort or return None if no source found."""
287 if util.safehasattr(repo, '_subparent'):
287 if util.safehasattr(repo, '_subparent'):
288 source = util.url(repo._subsource)
288 source = util.url(repo._subsource)
289 if source.isabs():
289 if source.isabs():
290 return str(source)
290 return str(source)
291 source.path = posixpath.normpath(source.path)
291 source.path = posixpath.normpath(source.path)
292 parent = _abssource(repo._subparent, push, abort=False)
292 parent = _abssource(repo._subparent, push, abort=False)
293 if parent:
293 if parent:
294 parent = util.url(util.pconvert(parent))
294 parent = util.url(util.pconvert(parent))
295 parent.path = posixpath.join(parent.path or '', source.path)
295 parent.path = posixpath.join(parent.path or '', source.path)
296 parent.path = posixpath.normpath(parent.path)
296 parent.path = posixpath.normpath(parent.path)
297 return str(parent)
297 return str(parent)
298 else: # recursion reached top repo
298 else: # recursion reached top repo
299 if util.safehasattr(repo, '_subtoppath'):
299 if util.safehasattr(repo, '_subtoppath'):
300 return repo._subtoppath
300 return repo._subtoppath
301 if push and repo.ui.config('paths', 'default-push'):
301 if push and repo.ui.config('paths', 'default-push'):
302 return repo.ui.config('paths', 'default-push')
302 return repo.ui.config('paths', 'default-push')
303 if repo.ui.config('paths', 'default'):
303 if repo.ui.config('paths', 'default'):
304 return repo.ui.config('paths', 'default')
304 return repo.ui.config('paths', 'default')
305 if repo.sharedpath != repo.path:
305 if repo.shared():
306 # chop off the .hg component to get the default path form
306 # chop off the .hg component to get the default path form
307 return os.path.dirname(repo.sharedpath)
307 return os.path.dirname(repo.sharedpath)
308 if abort:
308 if abort:
309 raise util.Abort(_("default path for subrepository not found"))
309 raise util.Abort(_("default path for subrepository not found"))
310
310
311 def _sanitize(ui, path, ignore):
311 def _sanitize(ui, path, ignore):
312 for dirname, dirs, names in os.walk(path):
312 for dirname, dirs, names in os.walk(path):
313 for i, d in enumerate(dirs):
313 for i, d in enumerate(dirs):
314 if d.lower() == ignore:
314 if d.lower() == ignore:
315 del dirs[i]
315 del dirs[i]
316 break
316 break
317 if os.path.basename(dirname).lower() != '.hg':
317 if os.path.basename(dirname).lower() != '.hg':
318 continue
318 continue
319 for f in names:
319 for f in names:
320 if f.lower() == 'hgrc':
320 if f.lower() == 'hgrc':
321 ui.warn(_("warning: removing potentially hostile 'hgrc' "
321 ui.warn(_("warning: removing potentially hostile 'hgrc' "
322 "in '%s'\n") % dirname)
322 "in '%s'\n") % dirname)
323 os.unlink(os.path.join(dirname, f))
323 os.unlink(os.path.join(dirname, f))
324
324
325 def subrepo(ctx, path):
325 def subrepo(ctx, path):
326 """return instance of the right subrepo class for subrepo in path"""
326 """return instance of the right subrepo class for subrepo in path"""
327 # subrepo inherently violates our import layering rules
327 # subrepo inherently violates our import layering rules
328 # because it wants to make repo objects from deep inside the stack
328 # because it wants to make repo objects from deep inside the stack
329 # so we manually delay the circular imports to not break
329 # so we manually delay the circular imports to not break
330 # scripts that don't use our demand-loading
330 # scripts that don't use our demand-loading
331 global hg
331 global hg
332 import hg as h
332 import hg as h
333 hg = h
333 hg = h
334
334
335 pathutil.pathauditor(ctx._repo.root)(path)
335 pathutil.pathauditor(ctx._repo.root)(path)
336 state = ctx.substate[path]
336 state = ctx.substate[path]
337 if state[2] not in types:
337 if state[2] not in types:
338 raise util.Abort(_('unknown subrepo type %s') % state[2])
338 raise util.Abort(_('unknown subrepo type %s') % state[2])
339 return types[state[2]](ctx, path, state[:2])
339 return types[state[2]](ctx, path, state[:2])
340
340
341 def newcommitphase(ui, ctx):
341 def newcommitphase(ui, ctx):
342 commitphase = phases.newcommitphase(ui)
342 commitphase = phases.newcommitphase(ui)
343 substate = getattr(ctx, "substate", None)
343 substate = getattr(ctx, "substate", None)
344 if not substate:
344 if not substate:
345 return commitphase
345 return commitphase
346 check = ui.config('phases', 'checksubrepos', 'follow')
346 check = ui.config('phases', 'checksubrepos', 'follow')
347 if check not in ('ignore', 'follow', 'abort'):
347 if check not in ('ignore', 'follow', 'abort'):
348 raise util.Abort(_('invalid phases.checksubrepos configuration: %s')
348 raise util.Abort(_('invalid phases.checksubrepos configuration: %s')
349 % (check))
349 % (check))
350 if check == 'ignore':
350 if check == 'ignore':
351 return commitphase
351 return commitphase
352 maxphase = phases.public
352 maxphase = phases.public
353 maxsub = None
353 maxsub = None
354 for s in sorted(substate):
354 for s in sorted(substate):
355 sub = ctx.sub(s)
355 sub = ctx.sub(s)
356 subphase = sub.phase(substate[s][1])
356 subphase = sub.phase(substate[s][1])
357 if maxphase < subphase:
357 if maxphase < subphase:
358 maxphase = subphase
358 maxphase = subphase
359 maxsub = s
359 maxsub = s
360 if commitphase < maxphase:
360 if commitphase < maxphase:
361 if check == 'abort':
361 if check == 'abort':
362 raise util.Abort(_("can't commit in %s phase"
362 raise util.Abort(_("can't commit in %s phase"
363 " conflicting %s from subrepository %s") %
363 " conflicting %s from subrepository %s") %
364 (phases.phasenames[commitphase],
364 (phases.phasenames[commitphase],
365 phases.phasenames[maxphase], maxsub))
365 phases.phasenames[maxphase], maxsub))
366 ui.warn(_("warning: changes are committed in"
366 ui.warn(_("warning: changes are committed in"
367 " %s phase from subrepository %s\n") %
367 " %s phase from subrepository %s\n") %
368 (phases.phasenames[maxphase], maxsub))
368 (phases.phasenames[maxphase], maxsub))
369 return maxphase
369 return maxphase
370 return commitphase
370 return commitphase
371
371
372 # subrepo classes need to implement the following abstract class:
372 # subrepo classes need to implement the following abstract class:
373
373
374 class abstractsubrepo(object):
374 class abstractsubrepo(object):
375
375
376 def __init__(self, ui):
376 def __init__(self, ui):
377 self.ui = ui
377 self.ui = ui
378
378
379 def storeclean(self, path):
379 def storeclean(self, path):
380 """
380 """
381 returns true if the repository has not changed since it was last
381 returns true if the repository has not changed since it was last
382 cloned from or pushed to a given repository.
382 cloned from or pushed to a given repository.
383 """
383 """
384 return False
384 return False
385
385
386 def dirty(self, ignoreupdate=False):
386 def dirty(self, ignoreupdate=False):
387 """returns true if the dirstate of the subrepo is dirty or does not
387 """returns true if the dirstate of the subrepo is dirty or does not
388 match current stored state. If ignoreupdate is true, only check
388 match current stored state. If ignoreupdate is true, only check
389 whether the subrepo has uncommitted changes in its dirstate.
389 whether the subrepo has uncommitted changes in its dirstate.
390 """
390 """
391 raise NotImplementedError
391 raise NotImplementedError
392
392
393 def basestate(self):
393 def basestate(self):
394 """current working directory base state, disregarding .hgsubstate
394 """current working directory base state, disregarding .hgsubstate
395 state and working directory modifications"""
395 state and working directory modifications"""
396 raise NotImplementedError
396 raise NotImplementedError
397
397
398 def checknested(self, path):
398 def checknested(self, path):
399 """check if path is a subrepository within this repository"""
399 """check if path is a subrepository within this repository"""
400 return False
400 return False
401
401
402 def commit(self, text, user, date):
402 def commit(self, text, user, date):
403 """commit the current changes to the subrepo with the given
403 """commit the current changes to the subrepo with the given
404 log message. Use given user and date if possible. Return the
404 log message. Use given user and date if possible. Return the
405 new state of the subrepo.
405 new state of the subrepo.
406 """
406 """
407 raise NotImplementedError
407 raise NotImplementedError
408
408
409 def phase(self, state):
409 def phase(self, state):
410 """returns phase of specified state in the subrepository.
410 """returns phase of specified state in the subrepository.
411 """
411 """
412 return phases.public
412 return phases.public
413
413
414 def remove(self):
414 def remove(self):
415 """remove the subrepo
415 """remove the subrepo
416
416
417 (should verify the dirstate is not dirty first)
417 (should verify the dirstate is not dirty first)
418 """
418 """
419 raise NotImplementedError
419 raise NotImplementedError
420
420
421 def get(self, state, overwrite=False):
421 def get(self, state, overwrite=False):
422 """run whatever commands are needed to put the subrepo into
422 """run whatever commands are needed to put the subrepo into
423 this state
423 this state
424 """
424 """
425 raise NotImplementedError
425 raise NotImplementedError
426
426
427 def merge(self, state):
427 def merge(self, state):
428 """merge currently-saved state with the new state."""
428 """merge currently-saved state with the new state."""
429 raise NotImplementedError
429 raise NotImplementedError
430
430
431 def push(self, opts):
431 def push(self, opts):
432 """perform whatever action is analogous to 'hg push'
432 """perform whatever action is analogous to 'hg push'
433
433
434 This may be a no-op on some systems.
434 This may be a no-op on some systems.
435 """
435 """
436 raise NotImplementedError
436 raise NotImplementedError
437
437
438 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
438 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
439 return []
439 return []
440
440
441 def addremove(self, matcher, prefix, opts, dry_run, similarity):
441 def addremove(self, matcher, prefix, opts, dry_run, similarity):
442 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
442 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
443 return 1
443 return 1
444
444
445 def cat(self, match, prefix, **opts):
445 def cat(self, match, prefix, **opts):
446 return 1
446 return 1
447
447
448 def status(self, rev2, **opts):
448 def status(self, rev2, **opts):
449 return scmutil.status([], [], [], [], [], [], [])
449 return scmutil.status([], [], [], [], [], [], [])
450
450
451 def diff(self, ui, diffopts, node2, match, prefix, **opts):
451 def diff(self, ui, diffopts, node2, match, prefix, **opts):
452 pass
452 pass
453
453
454 def outgoing(self, ui, dest, opts):
454 def outgoing(self, ui, dest, opts):
455 return 1
455 return 1
456
456
457 def incoming(self, ui, source, opts):
457 def incoming(self, ui, source, opts):
458 return 1
458 return 1
459
459
460 def files(self):
460 def files(self):
461 """return filename iterator"""
461 """return filename iterator"""
462 raise NotImplementedError
462 raise NotImplementedError
463
463
464 def filedata(self, name):
464 def filedata(self, name):
465 """return file data"""
465 """return file data"""
466 raise NotImplementedError
466 raise NotImplementedError
467
467
468 def fileflags(self, name):
468 def fileflags(self, name):
469 """return file flags"""
469 """return file flags"""
470 return ''
470 return ''
471
471
472 def archive(self, archiver, prefix, match=None):
472 def archive(self, archiver, prefix, match=None):
473 if match is not None:
473 if match is not None:
474 files = [f for f in self.files() if match(f)]
474 files = [f for f in self.files() if match(f)]
475 else:
475 else:
476 files = self.files()
476 files = self.files()
477 total = len(files)
477 total = len(files)
478 relpath = subrelpath(self)
478 relpath = subrelpath(self)
479 self.ui.progress(_('archiving (%s)') % relpath, 0,
479 self.ui.progress(_('archiving (%s)') % relpath, 0,
480 unit=_('files'), total=total)
480 unit=_('files'), total=total)
481 for i, name in enumerate(files):
481 for i, name in enumerate(files):
482 flags = self.fileflags(name)
482 flags = self.fileflags(name)
483 mode = 'x' in flags and 0755 or 0644
483 mode = 'x' in flags and 0755 or 0644
484 symlink = 'l' in flags
484 symlink = 'l' in flags
485 archiver.addfile(os.path.join(prefix, self._path, name),
485 archiver.addfile(os.path.join(prefix, self._path, name),
486 mode, symlink, self.filedata(name))
486 mode, symlink, self.filedata(name))
487 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
487 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
488 unit=_('files'), total=total)
488 unit=_('files'), total=total)
489 self.ui.progress(_('archiving (%s)') % relpath, None)
489 self.ui.progress(_('archiving (%s)') % relpath, None)
490 return total
490 return total
491
491
492 def walk(self, match):
492 def walk(self, match):
493 '''
493 '''
494 walk recursively through the directory tree, finding all files
494 walk recursively through the directory tree, finding all files
495 matched by the match function
495 matched by the match function
496 '''
496 '''
497 pass
497 pass
498
498
499 def forget(self, match, prefix):
499 def forget(self, match, prefix):
500 return ([], [])
500 return ([], [])
501
501
502 def removefiles(self, matcher, prefix, after, force, subrepos):
502 def removefiles(self, matcher, prefix, after, force, subrepos):
503 """remove the matched files from the subrepository and the filesystem,
503 """remove the matched files from the subrepository and the filesystem,
504 possibly by force and/or after the file has been removed from the
504 possibly by force and/or after the file has been removed from the
505 filesystem. Return 0 on success, 1 on any warning.
505 filesystem. Return 0 on success, 1 on any warning.
506 """
506 """
507 return 1
507 return 1
508
508
509 def revert(self, substate, *pats, **opts):
509 def revert(self, substate, *pats, **opts):
510 self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
510 self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
511 % (substate[0], substate[2]))
511 % (substate[0], substate[2]))
512 return []
512 return []
513
513
514 def shortid(self, revid):
514 def shortid(self, revid):
515 return revid
515 return revid
516
516
517 class hgsubrepo(abstractsubrepo):
517 class hgsubrepo(abstractsubrepo):
518 def __init__(self, ctx, path, state):
518 def __init__(self, ctx, path, state):
519 super(hgsubrepo, self).__init__(ctx._repo.ui)
519 super(hgsubrepo, self).__init__(ctx._repo.ui)
520 self._path = path
520 self._path = path
521 self._state = state
521 self._state = state
522 r = ctx._repo
522 r = ctx._repo
523 root = r.wjoin(path)
523 root = r.wjoin(path)
524 create = not r.wvfs.exists('%s/.hg' % path)
524 create = not r.wvfs.exists('%s/.hg' % path)
525 self._repo = hg.repository(r.baseui, root, create=create)
525 self._repo = hg.repository(r.baseui, root, create=create)
526 self.ui = self._repo.ui
526 self.ui = self._repo.ui
527 for s, k in [('ui', 'commitsubrepos')]:
527 for s, k in [('ui', 'commitsubrepos')]:
528 v = r.ui.config(s, k)
528 v = r.ui.config(s, k)
529 if v:
529 if v:
530 self.ui.setconfig(s, k, v, 'subrepo')
530 self.ui.setconfig(s, k, v, 'subrepo')
531 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
531 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
532 self._initrepo(r, state[0], create)
532 self._initrepo(r, state[0], create)
533
533
534 def storeclean(self, path):
534 def storeclean(self, path):
535 lock = self._repo.lock()
535 lock = self._repo.lock()
536 try:
536 try:
537 return self._storeclean(path)
537 return self._storeclean(path)
538 finally:
538 finally:
539 lock.release()
539 lock.release()
540
540
541 def _storeclean(self, path):
541 def _storeclean(self, path):
542 clean = True
542 clean = True
543 itercache = self._calcstorehash(path)
543 itercache = self._calcstorehash(path)
544 try:
544 try:
545 for filehash in self._readstorehashcache(path):
545 for filehash in self._readstorehashcache(path):
546 if filehash != itercache.next():
546 if filehash != itercache.next():
547 clean = False
547 clean = False
548 break
548 break
549 except StopIteration:
549 except StopIteration:
550 # the cached and current pull states have a different size
550 # the cached and current pull states have a different size
551 clean = False
551 clean = False
552 if clean:
552 if clean:
553 try:
553 try:
554 itercache.next()
554 itercache.next()
555 # the cached and current pull states have a different size
555 # the cached and current pull states have a different size
556 clean = False
556 clean = False
557 except StopIteration:
557 except StopIteration:
558 pass
558 pass
559 return clean
559 return clean
560
560
561 def _calcstorehash(self, remotepath):
561 def _calcstorehash(self, remotepath):
562 '''calculate a unique "store hash"
562 '''calculate a unique "store hash"
563
563
564 This method is used to to detect when there are changes that may
564 This method is used to to detect when there are changes that may
565 require a push to a given remote path.'''
565 require a push to a given remote path.'''
566 # sort the files that will be hashed in increasing (likely) file size
566 # sort the files that will be hashed in increasing (likely) file size
567 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
567 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
568 yield '# %s\n' % _expandedabspath(remotepath)
568 yield '# %s\n' % _expandedabspath(remotepath)
569 vfs = self._repo.vfs
569 vfs = self._repo.vfs
570 for relname in filelist:
570 for relname in filelist:
571 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
571 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
572 yield '%s = %s\n' % (relname, filehash)
572 yield '%s = %s\n' % (relname, filehash)
573
573
574 @propertycache
574 @propertycache
575 def _cachestorehashvfs(self):
575 def _cachestorehashvfs(self):
576 return scmutil.vfs(self._repo.join('cache/storehash'))
576 return scmutil.vfs(self._repo.join('cache/storehash'))
577
577
578 def _readstorehashcache(self, remotepath):
578 def _readstorehashcache(self, remotepath):
579 '''read the store hash cache for a given remote repository'''
579 '''read the store hash cache for a given remote repository'''
580 cachefile = _getstorehashcachename(remotepath)
580 cachefile = _getstorehashcachename(remotepath)
581 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
581 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
582
582
583 def _cachestorehash(self, remotepath):
583 def _cachestorehash(self, remotepath):
584 '''cache the current store hash
584 '''cache the current store hash
585
585
586 Each remote repo requires its own store hash cache, because a subrepo
586 Each remote repo requires its own store hash cache, because a subrepo
587 store may be "clean" versus a given remote repo, but not versus another
587 store may be "clean" versus a given remote repo, but not versus another
588 '''
588 '''
589 cachefile = _getstorehashcachename(remotepath)
589 cachefile = _getstorehashcachename(remotepath)
590 lock = self._repo.lock()
590 lock = self._repo.lock()
591 try:
591 try:
592 storehash = list(self._calcstorehash(remotepath))
592 storehash = list(self._calcstorehash(remotepath))
593 vfs = self._cachestorehashvfs
593 vfs = self._cachestorehashvfs
594 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
594 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
595 finally:
595 finally:
596 lock.release()
596 lock.release()
597
597
598 @annotatesubrepoerror
598 @annotatesubrepoerror
599 def _initrepo(self, parentrepo, source, create):
599 def _initrepo(self, parentrepo, source, create):
600 self._repo._subparent = parentrepo
600 self._repo._subparent = parentrepo
601 self._repo._subsource = source
601 self._repo._subsource = source
602
602
603 if create:
603 if create:
604 lines = ['[paths]\n']
604 lines = ['[paths]\n']
605
605
606 def addpathconfig(key, value):
606 def addpathconfig(key, value):
607 if value:
607 if value:
608 lines.append('%s = %s\n' % (key, value))
608 lines.append('%s = %s\n' % (key, value))
609 self.ui.setconfig('paths', key, value, 'subrepo')
609 self.ui.setconfig('paths', key, value, 'subrepo')
610
610
611 defpath = _abssource(self._repo, abort=False)
611 defpath = _abssource(self._repo, abort=False)
612 defpushpath = _abssource(self._repo, True, abort=False)
612 defpushpath = _abssource(self._repo, True, abort=False)
613 addpathconfig('default', defpath)
613 addpathconfig('default', defpath)
614 if defpath != defpushpath:
614 if defpath != defpushpath:
615 addpathconfig('default-push', defpushpath)
615 addpathconfig('default-push', defpushpath)
616
616
617 fp = self._repo.opener("hgrc", "w", text=True)
617 fp = self._repo.opener("hgrc", "w", text=True)
618 try:
618 try:
619 fp.write(''.join(lines))
619 fp.write(''.join(lines))
620 finally:
620 finally:
621 fp.close()
621 fp.close()
622
622
623 @annotatesubrepoerror
623 @annotatesubrepoerror
624 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
624 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
625 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
625 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
626 os.path.join(prefix, self._path), explicitonly)
626 os.path.join(prefix, self._path), explicitonly)
627
627
628 def addremove(self, m, prefix, opts, dry_run, similarity):
628 def addremove(self, m, prefix, opts, dry_run, similarity):
629 # In the same way as sub directories are processed, once in a subrepo,
629 # In the same way as sub directories are processed, once in a subrepo,
630 # always entry any of its subrepos. Don't corrupt the options that will
630 # always entry any of its subrepos. Don't corrupt the options that will
631 # be used to process sibling subrepos however.
631 # be used to process sibling subrepos however.
632 opts = copy.copy(opts)
632 opts = copy.copy(opts)
633 opts['subrepos'] = True
633 opts['subrepos'] = True
634 return scmutil.addremove(self._repo, m,
634 return scmutil.addremove(self._repo, m,
635 os.path.join(prefix, self._path), opts,
635 os.path.join(prefix, self._path), opts,
636 dry_run, similarity)
636 dry_run, similarity)
637
637
638 @annotatesubrepoerror
638 @annotatesubrepoerror
639 def cat(self, match, prefix, **opts):
639 def cat(self, match, prefix, **opts):
640 rev = self._state[1]
640 rev = self._state[1]
641 ctx = self._repo[rev]
641 ctx = self._repo[rev]
642 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
642 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
643
643
644 @annotatesubrepoerror
644 @annotatesubrepoerror
645 def status(self, rev2, **opts):
645 def status(self, rev2, **opts):
646 try:
646 try:
647 rev1 = self._state[1]
647 rev1 = self._state[1]
648 ctx1 = self._repo[rev1]
648 ctx1 = self._repo[rev1]
649 ctx2 = self._repo[rev2]
649 ctx2 = self._repo[rev2]
650 return self._repo.status(ctx1, ctx2, **opts)
650 return self._repo.status(ctx1, ctx2, **opts)
651 except error.RepoLookupError, inst:
651 except error.RepoLookupError, inst:
652 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
652 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
653 % (inst, subrelpath(self)))
653 % (inst, subrelpath(self)))
654 return scmutil.status([], [], [], [], [], [], [])
654 return scmutil.status([], [], [], [], [], [], [])
655
655
656 @annotatesubrepoerror
656 @annotatesubrepoerror
657 def diff(self, ui, diffopts, node2, match, prefix, **opts):
657 def diff(self, ui, diffopts, node2, match, prefix, **opts):
658 try:
658 try:
659 node1 = node.bin(self._state[1])
659 node1 = node.bin(self._state[1])
660 # We currently expect node2 to come from substate and be
660 # We currently expect node2 to come from substate and be
661 # in hex format
661 # in hex format
662 if node2 is not None:
662 if node2 is not None:
663 node2 = node.bin(node2)
663 node2 = node.bin(node2)
664 cmdutil.diffordiffstat(ui, self._repo, diffopts,
664 cmdutil.diffordiffstat(ui, self._repo, diffopts,
665 node1, node2, match,
665 node1, node2, match,
666 prefix=posixpath.join(prefix, self._path),
666 prefix=posixpath.join(prefix, self._path),
667 listsubrepos=True, **opts)
667 listsubrepos=True, **opts)
668 except error.RepoLookupError, inst:
668 except error.RepoLookupError, inst:
669 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
669 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
670 % (inst, subrelpath(self)))
670 % (inst, subrelpath(self)))
671
671
672 @annotatesubrepoerror
672 @annotatesubrepoerror
673 def archive(self, archiver, prefix, match=None):
673 def archive(self, archiver, prefix, match=None):
674 self._get(self._state + ('hg',))
674 self._get(self._state + ('hg',))
675 total = abstractsubrepo.archive(self, archiver, prefix, match)
675 total = abstractsubrepo.archive(self, archiver, prefix, match)
676 rev = self._state[1]
676 rev = self._state[1]
677 ctx = self._repo[rev]
677 ctx = self._repo[rev]
678 for subpath in ctx.substate:
678 for subpath in ctx.substate:
679 s = subrepo(ctx, subpath)
679 s = subrepo(ctx, subpath)
680 submatch = matchmod.narrowmatcher(subpath, match)
680 submatch = matchmod.narrowmatcher(subpath, match)
681 total += s.archive(
681 total += s.archive(
682 archiver, os.path.join(prefix, self._path), submatch)
682 archiver, os.path.join(prefix, self._path), submatch)
683 return total
683 return total
684
684
685 @annotatesubrepoerror
685 @annotatesubrepoerror
686 def dirty(self, ignoreupdate=False):
686 def dirty(self, ignoreupdate=False):
687 r = self._state[1]
687 r = self._state[1]
688 if r == '' and not ignoreupdate: # no state recorded
688 if r == '' and not ignoreupdate: # no state recorded
689 return True
689 return True
690 w = self._repo[None]
690 w = self._repo[None]
691 if r != w.p1().hex() and not ignoreupdate:
691 if r != w.p1().hex() and not ignoreupdate:
692 # different version checked out
692 # different version checked out
693 return True
693 return True
694 return w.dirty() # working directory changed
694 return w.dirty() # working directory changed
695
695
696 def basestate(self):
696 def basestate(self):
697 return self._repo['.'].hex()
697 return self._repo['.'].hex()
698
698
699 def checknested(self, path):
699 def checknested(self, path):
700 return self._repo._checknested(self._repo.wjoin(path))
700 return self._repo._checknested(self._repo.wjoin(path))
701
701
702 @annotatesubrepoerror
702 @annotatesubrepoerror
703 def commit(self, text, user, date):
703 def commit(self, text, user, date):
704 # don't bother committing in the subrepo if it's only been
704 # don't bother committing in the subrepo if it's only been
705 # updated
705 # updated
706 if not self.dirty(True):
706 if not self.dirty(True):
707 return self._repo['.'].hex()
707 return self._repo['.'].hex()
708 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
708 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
709 n = self._repo.commit(text, user, date)
709 n = self._repo.commit(text, user, date)
710 if not n:
710 if not n:
711 return self._repo['.'].hex() # different version checked out
711 return self._repo['.'].hex() # different version checked out
712 return node.hex(n)
712 return node.hex(n)
713
713
714 @annotatesubrepoerror
714 @annotatesubrepoerror
715 def phase(self, state):
715 def phase(self, state):
716 return self._repo[state].phase()
716 return self._repo[state].phase()
717
717
718 @annotatesubrepoerror
718 @annotatesubrepoerror
719 def remove(self):
719 def remove(self):
720 # we can't fully delete the repository as it may contain
720 # we can't fully delete the repository as it may contain
721 # local-only history
721 # local-only history
722 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
722 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
723 hg.clean(self._repo, node.nullid, False)
723 hg.clean(self._repo, node.nullid, False)
724
724
725 def _get(self, state):
725 def _get(self, state):
726 source, revision, kind = state
726 source, revision, kind = state
727 if revision in self._repo.unfiltered():
727 if revision in self._repo.unfiltered():
728 return True
728 return True
729 self._repo._subsource = source
729 self._repo._subsource = source
730 srcurl = _abssource(self._repo)
730 srcurl = _abssource(self._repo)
731 other = hg.peer(self._repo, {}, srcurl)
731 other = hg.peer(self._repo, {}, srcurl)
732 if len(self._repo) == 0:
732 if len(self._repo) == 0:
733 self.ui.status(_('cloning subrepo %s from %s\n')
733 self.ui.status(_('cloning subrepo %s from %s\n')
734 % (subrelpath(self), srcurl))
734 % (subrelpath(self), srcurl))
735 parentrepo = self._repo._subparent
735 parentrepo = self._repo._subparent
736 shutil.rmtree(self._repo.path)
736 shutil.rmtree(self._repo.path)
737 other, cloned = hg.clone(self._repo._subparent.baseui, {},
737 other, cloned = hg.clone(self._repo._subparent.baseui, {},
738 other, self._repo.root,
738 other, self._repo.root,
739 update=False)
739 update=False)
740 self._repo = cloned.local()
740 self._repo = cloned.local()
741 self._initrepo(parentrepo, source, create=True)
741 self._initrepo(parentrepo, source, create=True)
742 self._cachestorehash(srcurl)
742 self._cachestorehash(srcurl)
743 else:
743 else:
744 self.ui.status(_('pulling subrepo %s from %s\n')
744 self.ui.status(_('pulling subrepo %s from %s\n')
745 % (subrelpath(self), srcurl))
745 % (subrelpath(self), srcurl))
746 cleansub = self.storeclean(srcurl)
746 cleansub = self.storeclean(srcurl)
747 exchange.pull(self._repo, other)
747 exchange.pull(self._repo, other)
748 if cleansub:
748 if cleansub:
749 # keep the repo clean after pull
749 # keep the repo clean after pull
750 self._cachestorehash(srcurl)
750 self._cachestorehash(srcurl)
751 return False
751 return False
752
752
753 @annotatesubrepoerror
753 @annotatesubrepoerror
754 def get(self, state, overwrite=False):
754 def get(self, state, overwrite=False):
755 inrepo = self._get(state)
755 inrepo = self._get(state)
756 source, revision, kind = state
756 source, revision, kind = state
757 repo = self._repo
757 repo = self._repo
758 repo.ui.debug("getting subrepo %s\n" % self._path)
758 repo.ui.debug("getting subrepo %s\n" % self._path)
759 if inrepo:
759 if inrepo:
760 urepo = repo.unfiltered()
760 urepo = repo.unfiltered()
761 ctx = urepo[revision]
761 ctx = urepo[revision]
762 if ctx.hidden():
762 if ctx.hidden():
763 urepo.ui.warn(
763 urepo.ui.warn(
764 _('revision %s in subrepo %s is hidden\n') \
764 _('revision %s in subrepo %s is hidden\n') \
765 % (revision[0:12], self._path))
765 % (revision[0:12], self._path))
766 repo = urepo
766 repo = urepo
767 hg.updaterepo(repo, revision, overwrite)
767 hg.updaterepo(repo, revision, overwrite)
768
768
769 @annotatesubrepoerror
769 @annotatesubrepoerror
770 def merge(self, state):
770 def merge(self, state):
771 self._get(state)
771 self._get(state)
772 cur = self._repo['.']
772 cur = self._repo['.']
773 dst = self._repo[state[1]]
773 dst = self._repo[state[1]]
774 anc = dst.ancestor(cur)
774 anc = dst.ancestor(cur)
775
775
776 def mergefunc():
776 def mergefunc():
777 if anc == cur and dst.branch() == cur.branch():
777 if anc == cur and dst.branch() == cur.branch():
778 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
778 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
779 hg.update(self._repo, state[1])
779 hg.update(self._repo, state[1])
780 elif anc == dst:
780 elif anc == dst:
781 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
781 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
782 else:
782 else:
783 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
783 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
784 hg.merge(self._repo, state[1], remind=False)
784 hg.merge(self._repo, state[1], remind=False)
785
785
786 wctx = self._repo[None]
786 wctx = self._repo[None]
787 if self.dirty():
787 if self.dirty():
788 if anc != dst:
788 if anc != dst:
789 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
789 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
790 mergefunc()
790 mergefunc()
791 else:
791 else:
792 mergefunc()
792 mergefunc()
793 else:
793 else:
794 mergefunc()
794 mergefunc()
795
795
796 @annotatesubrepoerror
796 @annotatesubrepoerror
797 def push(self, opts):
797 def push(self, opts):
798 force = opts.get('force')
798 force = opts.get('force')
799 newbranch = opts.get('new_branch')
799 newbranch = opts.get('new_branch')
800 ssh = opts.get('ssh')
800 ssh = opts.get('ssh')
801
801
802 # push subrepos depth-first for coherent ordering
802 # push subrepos depth-first for coherent ordering
803 c = self._repo['']
803 c = self._repo['']
804 subs = c.substate # only repos that are committed
804 subs = c.substate # only repos that are committed
805 for s in sorted(subs):
805 for s in sorted(subs):
806 if c.sub(s).push(opts) == 0:
806 if c.sub(s).push(opts) == 0:
807 return False
807 return False
808
808
809 dsturl = _abssource(self._repo, True)
809 dsturl = _abssource(self._repo, True)
810 if not force:
810 if not force:
811 if self.storeclean(dsturl):
811 if self.storeclean(dsturl):
812 self.ui.status(
812 self.ui.status(
813 _('no changes made to subrepo %s since last push to %s\n')
813 _('no changes made to subrepo %s since last push to %s\n')
814 % (subrelpath(self), dsturl))
814 % (subrelpath(self), dsturl))
815 return None
815 return None
816 self.ui.status(_('pushing subrepo %s to %s\n') %
816 self.ui.status(_('pushing subrepo %s to %s\n') %
817 (subrelpath(self), dsturl))
817 (subrelpath(self), dsturl))
818 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
818 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
819 res = exchange.push(self._repo, other, force, newbranch=newbranch)
819 res = exchange.push(self._repo, other, force, newbranch=newbranch)
820
820
821 # the repo is now clean
821 # the repo is now clean
822 self._cachestorehash(dsturl)
822 self._cachestorehash(dsturl)
823 return res.cgresult
823 return res.cgresult
824
824
825 @annotatesubrepoerror
825 @annotatesubrepoerror
826 def outgoing(self, ui, dest, opts):
826 def outgoing(self, ui, dest, opts):
827 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
827 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
828
828
829 @annotatesubrepoerror
829 @annotatesubrepoerror
830 def incoming(self, ui, source, opts):
830 def incoming(self, ui, source, opts):
831 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
831 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
832
832
833 @annotatesubrepoerror
833 @annotatesubrepoerror
834 def files(self):
834 def files(self):
835 rev = self._state[1]
835 rev = self._state[1]
836 ctx = self._repo[rev]
836 ctx = self._repo[rev]
837 return ctx.manifest()
837 return ctx.manifest()
838
838
839 def filedata(self, name):
839 def filedata(self, name):
840 rev = self._state[1]
840 rev = self._state[1]
841 return self._repo[rev][name].data()
841 return self._repo[rev][name].data()
842
842
843 def fileflags(self, name):
843 def fileflags(self, name):
844 rev = self._state[1]
844 rev = self._state[1]
845 ctx = self._repo[rev]
845 ctx = self._repo[rev]
846 return ctx.flags(name)
846 return ctx.flags(name)
847
847
848 def walk(self, match):
848 def walk(self, match):
849 ctx = self._repo[None]
849 ctx = self._repo[None]
850 return ctx.walk(match)
850 return ctx.walk(match)
851
851
852 @annotatesubrepoerror
852 @annotatesubrepoerror
853 def forget(self, match, prefix):
853 def forget(self, match, prefix):
854 return cmdutil.forget(self.ui, self._repo, match,
854 return cmdutil.forget(self.ui, self._repo, match,
855 os.path.join(prefix, self._path), True)
855 os.path.join(prefix, self._path), True)
856
856
857 @annotatesubrepoerror
857 @annotatesubrepoerror
858 def removefiles(self, matcher, prefix, after, force, subrepos):
858 def removefiles(self, matcher, prefix, after, force, subrepos):
859 return cmdutil.remove(self.ui, self._repo, matcher,
859 return cmdutil.remove(self.ui, self._repo, matcher,
860 os.path.join(prefix, self._path), after, force,
860 os.path.join(prefix, self._path), after, force,
861 subrepos)
861 subrepos)
862
862
863 @annotatesubrepoerror
863 @annotatesubrepoerror
864 def revert(self, substate, *pats, **opts):
864 def revert(self, substate, *pats, **opts):
865 # reverting a subrepo is a 2 step process:
865 # reverting a subrepo is a 2 step process:
866 # 1. if the no_backup is not set, revert all modified
866 # 1. if the no_backup is not set, revert all modified
867 # files inside the subrepo
867 # files inside the subrepo
868 # 2. update the subrepo to the revision specified in
868 # 2. update the subrepo to the revision specified in
869 # the corresponding substate dictionary
869 # the corresponding substate dictionary
870 self.ui.status(_('reverting subrepo %s\n') % substate[0])
870 self.ui.status(_('reverting subrepo %s\n') % substate[0])
871 if not opts.get('no_backup'):
871 if not opts.get('no_backup'):
872 # Revert all files on the subrepo, creating backups
872 # Revert all files on the subrepo, creating backups
873 # Note that this will not recursively revert subrepos
873 # Note that this will not recursively revert subrepos
874 # We could do it if there was a set:subrepos() predicate
874 # We could do it if there was a set:subrepos() predicate
875 opts = opts.copy()
875 opts = opts.copy()
876 opts['date'] = None
876 opts['date'] = None
877 opts['rev'] = substate[1]
877 opts['rev'] = substate[1]
878
878
879 pats = []
879 pats = []
880 if not opts.get('all'):
880 if not opts.get('all'):
881 pats = ['set:modified()']
881 pats = ['set:modified()']
882 self.filerevert(*pats, **opts)
882 self.filerevert(*pats, **opts)
883
883
884 # Update the repo to the revision specified in the given substate
884 # Update the repo to the revision specified in the given substate
885 self.get(substate, overwrite=True)
885 self.get(substate, overwrite=True)
886
886
887 def filerevert(self, *pats, **opts):
887 def filerevert(self, *pats, **opts):
888 ctx = self._repo[opts['rev']]
888 ctx = self._repo[opts['rev']]
889 parents = self._repo.dirstate.parents()
889 parents = self._repo.dirstate.parents()
890 if opts.get('all'):
890 if opts.get('all'):
891 pats = ['set:modified()']
891 pats = ['set:modified()']
892 else:
892 else:
893 pats = []
893 pats = []
894 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
894 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
895
895
896 def shortid(self, revid):
896 def shortid(self, revid):
897 return revid[:12]
897 return revid[:12]
898
898
899 class svnsubrepo(abstractsubrepo):
899 class svnsubrepo(abstractsubrepo):
900 def __init__(self, ctx, path, state):
900 def __init__(self, ctx, path, state):
901 super(svnsubrepo, self).__init__(ctx._repo.ui)
901 super(svnsubrepo, self).__init__(ctx._repo.ui)
902 self._path = path
902 self._path = path
903 self._state = state
903 self._state = state
904 self._ctx = ctx
904 self._ctx = ctx
905 self._exe = util.findexe('svn')
905 self._exe = util.findexe('svn')
906 if not self._exe:
906 if not self._exe:
907 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
907 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
908 % self._path)
908 % self._path)
909
909
910 def _svncommand(self, commands, filename='', failok=False):
910 def _svncommand(self, commands, filename='', failok=False):
911 cmd = [self._exe]
911 cmd = [self._exe]
912 extrakw = {}
912 extrakw = {}
913 if not self.ui.interactive():
913 if not self.ui.interactive():
914 # Making stdin be a pipe should prevent svn from behaving
914 # Making stdin be a pipe should prevent svn from behaving
915 # interactively even if we can't pass --non-interactive.
915 # interactively even if we can't pass --non-interactive.
916 extrakw['stdin'] = subprocess.PIPE
916 extrakw['stdin'] = subprocess.PIPE
917 # Starting in svn 1.5 --non-interactive is a global flag
917 # Starting in svn 1.5 --non-interactive is a global flag
918 # instead of being per-command, but we need to support 1.4 so
918 # instead of being per-command, but we need to support 1.4 so
919 # we have to be intelligent about what commands take
919 # we have to be intelligent about what commands take
920 # --non-interactive.
920 # --non-interactive.
921 if commands[0] in ('update', 'checkout', 'commit'):
921 if commands[0] in ('update', 'checkout', 'commit'):
922 cmd.append('--non-interactive')
922 cmd.append('--non-interactive')
923 cmd.extend(commands)
923 cmd.extend(commands)
924 if filename is not None:
924 if filename is not None:
925 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
925 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
926 cmd.append(path)
926 cmd.append(path)
927 env = dict(os.environ)
927 env = dict(os.environ)
928 # Avoid localized output, preserve current locale for everything else.
928 # Avoid localized output, preserve current locale for everything else.
929 lc_all = env.get('LC_ALL')
929 lc_all = env.get('LC_ALL')
930 if lc_all:
930 if lc_all:
931 env['LANG'] = lc_all
931 env['LANG'] = lc_all
932 del env['LC_ALL']
932 del env['LC_ALL']
933 env['LC_MESSAGES'] = 'C'
933 env['LC_MESSAGES'] = 'C'
934 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
934 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
935 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
935 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
936 universal_newlines=True, env=env, **extrakw)
936 universal_newlines=True, env=env, **extrakw)
937 stdout, stderr = p.communicate()
937 stdout, stderr = p.communicate()
938 stderr = stderr.strip()
938 stderr = stderr.strip()
939 if not failok:
939 if not failok:
940 if p.returncode:
940 if p.returncode:
941 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
941 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
942 if stderr:
942 if stderr:
943 self.ui.warn(stderr + '\n')
943 self.ui.warn(stderr + '\n')
944 return stdout, stderr
944 return stdout, stderr
945
945
946 @propertycache
946 @propertycache
947 def _svnversion(self):
947 def _svnversion(self):
948 output, err = self._svncommand(['--version', '--quiet'], filename=None)
948 output, err = self._svncommand(['--version', '--quiet'], filename=None)
949 m = re.search(r'^(\d+)\.(\d+)', output)
949 m = re.search(r'^(\d+)\.(\d+)', output)
950 if not m:
950 if not m:
951 raise util.Abort(_('cannot retrieve svn tool version'))
951 raise util.Abort(_('cannot retrieve svn tool version'))
952 return (int(m.group(1)), int(m.group(2)))
952 return (int(m.group(1)), int(m.group(2)))
953
953
954 def _wcrevs(self):
954 def _wcrevs(self):
955 # Get the working directory revision as well as the last
955 # Get the working directory revision as well as the last
956 # commit revision so we can compare the subrepo state with
956 # commit revision so we can compare the subrepo state with
957 # both. We used to store the working directory one.
957 # both. We used to store the working directory one.
958 output, err = self._svncommand(['info', '--xml'])
958 output, err = self._svncommand(['info', '--xml'])
959 doc = xml.dom.minidom.parseString(output)
959 doc = xml.dom.minidom.parseString(output)
960 entries = doc.getElementsByTagName('entry')
960 entries = doc.getElementsByTagName('entry')
961 lastrev, rev = '0', '0'
961 lastrev, rev = '0', '0'
962 if entries:
962 if entries:
963 rev = str(entries[0].getAttribute('revision')) or '0'
963 rev = str(entries[0].getAttribute('revision')) or '0'
964 commits = entries[0].getElementsByTagName('commit')
964 commits = entries[0].getElementsByTagName('commit')
965 if commits:
965 if commits:
966 lastrev = str(commits[0].getAttribute('revision')) or '0'
966 lastrev = str(commits[0].getAttribute('revision')) or '0'
967 return (lastrev, rev)
967 return (lastrev, rev)
968
968
969 def _wcrev(self):
969 def _wcrev(self):
970 return self._wcrevs()[0]
970 return self._wcrevs()[0]
971
971
972 def _wcchanged(self):
972 def _wcchanged(self):
973 """Return (changes, extchanges, missing) where changes is True
973 """Return (changes, extchanges, missing) where changes is True
974 if the working directory was changed, extchanges is
974 if the working directory was changed, extchanges is
975 True if any of these changes concern an external entry and missing
975 True if any of these changes concern an external entry and missing
976 is True if any change is a missing entry.
976 is True if any change is a missing entry.
977 """
977 """
978 output, err = self._svncommand(['status', '--xml'])
978 output, err = self._svncommand(['status', '--xml'])
979 externals, changes, missing = [], [], []
979 externals, changes, missing = [], [], []
980 doc = xml.dom.minidom.parseString(output)
980 doc = xml.dom.minidom.parseString(output)
981 for e in doc.getElementsByTagName('entry'):
981 for e in doc.getElementsByTagName('entry'):
982 s = e.getElementsByTagName('wc-status')
982 s = e.getElementsByTagName('wc-status')
983 if not s:
983 if not s:
984 continue
984 continue
985 item = s[0].getAttribute('item')
985 item = s[0].getAttribute('item')
986 props = s[0].getAttribute('props')
986 props = s[0].getAttribute('props')
987 path = e.getAttribute('path')
987 path = e.getAttribute('path')
988 if item == 'external':
988 if item == 'external':
989 externals.append(path)
989 externals.append(path)
990 elif item == 'missing':
990 elif item == 'missing':
991 missing.append(path)
991 missing.append(path)
992 if (item not in ('', 'normal', 'unversioned', 'external')
992 if (item not in ('', 'normal', 'unversioned', 'external')
993 or props not in ('', 'none', 'normal')):
993 or props not in ('', 'none', 'normal')):
994 changes.append(path)
994 changes.append(path)
995 for path in changes:
995 for path in changes:
996 for ext in externals:
996 for ext in externals:
997 if path == ext or path.startswith(ext + os.sep):
997 if path == ext or path.startswith(ext + os.sep):
998 return True, True, bool(missing)
998 return True, True, bool(missing)
999 return bool(changes), False, bool(missing)
999 return bool(changes), False, bool(missing)
1000
1000
1001 def dirty(self, ignoreupdate=False):
1001 def dirty(self, ignoreupdate=False):
1002 if not self._wcchanged()[0]:
1002 if not self._wcchanged()[0]:
1003 if self._state[1] in self._wcrevs() or ignoreupdate:
1003 if self._state[1] in self._wcrevs() or ignoreupdate:
1004 return False
1004 return False
1005 return True
1005 return True
1006
1006
1007 def basestate(self):
1007 def basestate(self):
1008 lastrev, rev = self._wcrevs()
1008 lastrev, rev = self._wcrevs()
1009 if lastrev != rev:
1009 if lastrev != rev:
1010 # Last committed rev is not the same than rev. We would
1010 # Last committed rev is not the same than rev. We would
1011 # like to take lastrev but we do not know if the subrepo
1011 # like to take lastrev but we do not know if the subrepo
1012 # URL exists at lastrev. Test it and fallback to rev it
1012 # URL exists at lastrev. Test it and fallback to rev it
1013 # is not there.
1013 # is not there.
1014 try:
1014 try:
1015 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1015 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1016 return lastrev
1016 return lastrev
1017 except error.Abort:
1017 except error.Abort:
1018 pass
1018 pass
1019 return rev
1019 return rev
1020
1020
1021 @annotatesubrepoerror
1021 @annotatesubrepoerror
1022 def commit(self, text, user, date):
1022 def commit(self, text, user, date):
1023 # user and date are out of our hands since svn is centralized
1023 # user and date are out of our hands since svn is centralized
1024 changed, extchanged, missing = self._wcchanged()
1024 changed, extchanged, missing = self._wcchanged()
1025 if not changed:
1025 if not changed:
1026 return self.basestate()
1026 return self.basestate()
1027 if extchanged:
1027 if extchanged:
1028 # Do not try to commit externals
1028 # Do not try to commit externals
1029 raise util.Abort(_('cannot commit svn externals'))
1029 raise util.Abort(_('cannot commit svn externals'))
1030 if missing:
1030 if missing:
1031 # svn can commit with missing entries but aborting like hg
1031 # svn can commit with missing entries but aborting like hg
1032 # seems a better approach.
1032 # seems a better approach.
1033 raise util.Abort(_('cannot commit missing svn entries'))
1033 raise util.Abort(_('cannot commit missing svn entries'))
1034 commitinfo, err = self._svncommand(['commit', '-m', text])
1034 commitinfo, err = self._svncommand(['commit', '-m', text])
1035 self.ui.status(commitinfo)
1035 self.ui.status(commitinfo)
1036 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1036 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1037 if not newrev:
1037 if not newrev:
1038 if not commitinfo.strip():
1038 if not commitinfo.strip():
1039 # Sometimes, our definition of "changed" differs from
1039 # Sometimes, our definition of "changed" differs from
1040 # svn one. For instance, svn ignores missing files
1040 # svn one. For instance, svn ignores missing files
1041 # when committing. If there are only missing files, no
1041 # when committing. If there are only missing files, no
1042 # commit is made, no output and no error code.
1042 # commit is made, no output and no error code.
1043 raise util.Abort(_('failed to commit svn changes'))
1043 raise util.Abort(_('failed to commit svn changes'))
1044 raise util.Abort(commitinfo.splitlines()[-1])
1044 raise util.Abort(commitinfo.splitlines()[-1])
1045 newrev = newrev.groups()[0]
1045 newrev = newrev.groups()[0]
1046 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1046 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1047 return newrev
1047 return newrev
1048
1048
1049 @annotatesubrepoerror
1049 @annotatesubrepoerror
1050 def remove(self):
1050 def remove(self):
1051 if self.dirty():
1051 if self.dirty():
1052 self.ui.warn(_('not removing repo %s because '
1052 self.ui.warn(_('not removing repo %s because '
1053 'it has changes.\n') % self._path)
1053 'it has changes.\n') % self._path)
1054 return
1054 return
1055 self.ui.note(_('removing subrepo %s\n') % self._path)
1055 self.ui.note(_('removing subrepo %s\n') % self._path)
1056
1056
1057 def onerror(function, path, excinfo):
1057 def onerror(function, path, excinfo):
1058 if function is not os.remove:
1058 if function is not os.remove:
1059 raise
1059 raise
1060 # read-only files cannot be unlinked under Windows
1060 # read-only files cannot be unlinked under Windows
1061 s = os.stat(path)
1061 s = os.stat(path)
1062 if (s.st_mode & stat.S_IWRITE) != 0:
1062 if (s.st_mode & stat.S_IWRITE) != 0:
1063 raise
1063 raise
1064 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
1064 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
1065 os.remove(path)
1065 os.remove(path)
1066
1066
1067 path = self._ctx._repo.wjoin(self._path)
1067 path = self._ctx._repo.wjoin(self._path)
1068 shutil.rmtree(path, onerror=onerror)
1068 shutil.rmtree(path, onerror=onerror)
1069 try:
1069 try:
1070 os.removedirs(os.path.dirname(path))
1070 os.removedirs(os.path.dirname(path))
1071 except OSError:
1071 except OSError:
1072 pass
1072 pass
1073
1073
1074 @annotatesubrepoerror
1074 @annotatesubrepoerror
1075 def get(self, state, overwrite=False):
1075 def get(self, state, overwrite=False):
1076 if overwrite:
1076 if overwrite:
1077 self._svncommand(['revert', '--recursive'])
1077 self._svncommand(['revert', '--recursive'])
1078 args = ['checkout']
1078 args = ['checkout']
1079 if self._svnversion >= (1, 5):
1079 if self._svnversion >= (1, 5):
1080 args.append('--force')
1080 args.append('--force')
1081 # The revision must be specified at the end of the URL to properly
1081 # The revision must be specified at the end of the URL to properly
1082 # update to a directory which has since been deleted and recreated.
1082 # update to a directory which has since been deleted and recreated.
1083 args.append('%s@%s' % (state[0], state[1]))
1083 args.append('%s@%s' % (state[0], state[1]))
1084 status, err = self._svncommand(args, failok=True)
1084 status, err = self._svncommand(args, failok=True)
1085 _sanitize(self.ui, self._ctx._repo.wjoin(self._path), '.svn')
1085 _sanitize(self.ui, self._ctx._repo.wjoin(self._path), '.svn')
1086 if not re.search('Checked out revision [0-9]+.', status):
1086 if not re.search('Checked out revision [0-9]+.', status):
1087 if ('is already a working copy for a different URL' in err
1087 if ('is already a working copy for a different URL' in err
1088 and (self._wcchanged()[:2] == (False, False))):
1088 and (self._wcchanged()[:2] == (False, False))):
1089 # obstructed but clean working copy, so just blow it away.
1089 # obstructed but clean working copy, so just blow it away.
1090 self.remove()
1090 self.remove()
1091 self.get(state, overwrite=False)
1091 self.get(state, overwrite=False)
1092 return
1092 return
1093 raise util.Abort((status or err).splitlines()[-1])
1093 raise util.Abort((status or err).splitlines()[-1])
1094 self.ui.status(status)
1094 self.ui.status(status)
1095
1095
1096 @annotatesubrepoerror
1096 @annotatesubrepoerror
1097 def merge(self, state):
1097 def merge(self, state):
1098 old = self._state[1]
1098 old = self._state[1]
1099 new = state[1]
1099 new = state[1]
1100 wcrev = self._wcrev()
1100 wcrev = self._wcrev()
1101 if new != wcrev:
1101 if new != wcrev:
1102 dirty = old == wcrev or self._wcchanged()[0]
1102 dirty = old == wcrev or self._wcchanged()[0]
1103 if _updateprompt(self.ui, self, dirty, wcrev, new):
1103 if _updateprompt(self.ui, self, dirty, wcrev, new):
1104 self.get(state, False)
1104 self.get(state, False)
1105
1105
1106 def push(self, opts):
1106 def push(self, opts):
1107 # push is a no-op for SVN
1107 # push is a no-op for SVN
1108 return True
1108 return True
1109
1109
1110 @annotatesubrepoerror
1110 @annotatesubrepoerror
1111 def files(self):
1111 def files(self):
1112 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1112 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1113 doc = xml.dom.minidom.parseString(output)
1113 doc = xml.dom.minidom.parseString(output)
1114 paths = []
1114 paths = []
1115 for e in doc.getElementsByTagName('entry'):
1115 for e in doc.getElementsByTagName('entry'):
1116 kind = str(e.getAttribute('kind'))
1116 kind = str(e.getAttribute('kind'))
1117 if kind != 'file':
1117 if kind != 'file':
1118 continue
1118 continue
1119 name = ''.join(c.data for c
1119 name = ''.join(c.data for c
1120 in e.getElementsByTagName('name')[0].childNodes
1120 in e.getElementsByTagName('name')[0].childNodes
1121 if c.nodeType == c.TEXT_NODE)
1121 if c.nodeType == c.TEXT_NODE)
1122 paths.append(name.encode('utf-8'))
1122 paths.append(name.encode('utf-8'))
1123 return paths
1123 return paths
1124
1124
1125 def filedata(self, name):
1125 def filedata(self, name):
1126 return self._svncommand(['cat'], name)[0]
1126 return self._svncommand(['cat'], name)[0]
1127
1127
1128
1128
class gitsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state):
        """Initialize a git subrepo rooted at 'path' inside ctx's repo.

        state is the (source, revision, kind) tuple from .hgsubstate;
        also verifies a usable git executable is available.
        """
        super(gitsubrepo, self).__init__(ctx._repo.ui)
        self._state = state
        self._ctx = ctx
        self._path = path
        # path relative to the outermost repo, for user-facing messages
        self._relpath = os.path.join(reporelpath(ctx._repo), path)
        # absolute path of the subrepo working directory
        self._abspath = ctx._repo.wjoin(path)
        self._subparent = ctx._repo
        self._ensuregit()
1139
1139
1140 def _ensuregit(self):
1140 def _ensuregit(self):
1141 try:
1141 try:
1142 self._gitexecutable = 'git'
1142 self._gitexecutable = 'git'
1143 out, err = self._gitnodir(['--version'])
1143 out, err = self._gitnodir(['--version'])
1144 except OSError, e:
1144 except OSError, e:
1145 if e.errno != 2 or os.name != 'nt':
1145 if e.errno != 2 or os.name != 'nt':
1146 raise
1146 raise
1147 self._gitexecutable = 'git.cmd'
1147 self._gitexecutable = 'git.cmd'
1148 out, err = self._gitnodir(['--version'])
1148 out, err = self._gitnodir(['--version'])
1149 versionstatus = self._checkversion(out)
1149 versionstatus = self._checkversion(out)
1150 if versionstatus == 'unknown':
1150 if versionstatus == 'unknown':
1151 self.ui.warn(_('cannot retrieve git version\n'))
1151 self.ui.warn(_('cannot retrieve git version\n'))
1152 elif versionstatus == 'abort':
1152 elif versionstatus == 'abort':
1153 raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
1153 raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
1154 elif versionstatus == 'warning':
1154 elif versionstatus == 'warning':
1155 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1155 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1156
1156
1157 @staticmethod
1157 @staticmethod
1158 def _gitversion(out):
1158 def _gitversion(out):
1159 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1159 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1160 if m:
1160 if m:
1161 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1161 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1162
1162
1163 m = re.search(r'^git version (\d+)\.(\d+)', out)
1163 m = re.search(r'^git version (\d+)\.(\d+)', out)
1164 if m:
1164 if m:
1165 return (int(m.group(1)), int(m.group(2)), 0)
1165 return (int(m.group(1)), int(m.group(2)), 0)
1166
1166
1167 return -1
1167 return -1
1168
1168
1169 @staticmethod
1169 @staticmethod
1170 def _checkversion(out):
1170 def _checkversion(out):
1171 '''ensure git version is new enough
1171 '''ensure git version is new enough
1172
1172
1173 >>> _checkversion = gitsubrepo._checkversion
1173 >>> _checkversion = gitsubrepo._checkversion
1174 >>> _checkversion('git version 1.6.0')
1174 >>> _checkversion('git version 1.6.0')
1175 'ok'
1175 'ok'
1176 >>> _checkversion('git version 1.8.5')
1176 >>> _checkversion('git version 1.8.5')
1177 'ok'
1177 'ok'
1178 >>> _checkversion('git version 1.4.0')
1178 >>> _checkversion('git version 1.4.0')
1179 'abort'
1179 'abort'
1180 >>> _checkversion('git version 1.5.0')
1180 >>> _checkversion('git version 1.5.0')
1181 'warning'
1181 'warning'
1182 >>> _checkversion('git version 1.9-rc0')
1182 >>> _checkversion('git version 1.9-rc0')
1183 'ok'
1183 'ok'
1184 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1184 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1185 'ok'
1185 'ok'
1186 >>> _checkversion('git version 1.9.0.GIT')
1186 >>> _checkversion('git version 1.9.0.GIT')
1187 'ok'
1187 'ok'
1188 >>> _checkversion('git version 12345')
1188 >>> _checkversion('git version 12345')
1189 'unknown'
1189 'unknown'
1190 >>> _checkversion('no')
1190 >>> _checkversion('no')
1191 'unknown'
1191 'unknown'
1192 '''
1192 '''
1193 version = gitsubrepo._gitversion(out)
1193 version = gitsubrepo._gitversion(out)
1194 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1194 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1195 # despite the docstring comment. For now, error on 1.4.0, warn on
1195 # despite the docstring comment. For now, error on 1.4.0, warn on
1196 # 1.5.0 but attempt to continue.
1196 # 1.5.0 but attempt to continue.
1197 if version == -1:
1197 if version == -1:
1198 return 'unknown'
1198 return 'unknown'
1199 if version < (1, 5, 0):
1199 if version < (1, 5, 0):
1200 return 'abort'
1200 return 'abort'
1201 elif version < (1, 6, 0):
1201 elif version < (1, 6, 0):
1202 return 'warning'
1202 return 'warning'
1203 return 'ok'
1203 return 'ok'
1204
1204
1205 def _gitcommand(self, commands, env=None, stream=False):
1205 def _gitcommand(self, commands, env=None, stream=False):
1206 return self._gitdir(commands, env=env, stream=stream)[0]
1206 return self._gitdir(commands, env=env, stream=stream)[0]
1207
1207
1208 def _gitdir(self, commands, env=None, stream=False):
1208 def _gitdir(self, commands, env=None, stream=False):
1209 return self._gitnodir(commands, env=env, stream=stream,
1209 return self._gitnodir(commands, env=env, stream=stream,
1210 cwd=self._abspath)
1210 cwd=self._abspath)
1211
1211
1212 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1212 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1213 """Calls the git command
1213 """Calls the git command
1214
1214
1215 The methods tries to call the git command. versions prior to 1.6.0
1215 The methods tries to call the git command. versions prior to 1.6.0
1216 are not supported and very probably fail.
1216 are not supported and very probably fail.
1217 """
1217 """
1218 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1218 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1219 # unless ui.quiet is set, print git's stderr,
1219 # unless ui.quiet is set, print git's stderr,
1220 # which is mostly progress and useful info
1220 # which is mostly progress and useful info
1221 errpipe = None
1221 errpipe = None
1222 if self.ui.quiet:
1222 if self.ui.quiet:
1223 errpipe = open(os.devnull, 'w')
1223 errpipe = open(os.devnull, 'w')
1224 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1224 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1225 cwd=cwd, env=env, close_fds=util.closefds,
1225 cwd=cwd, env=env, close_fds=util.closefds,
1226 stdout=subprocess.PIPE, stderr=errpipe)
1226 stdout=subprocess.PIPE, stderr=errpipe)
1227 if stream:
1227 if stream:
1228 return p.stdout, None
1228 return p.stdout, None
1229
1229
1230 retdata = p.stdout.read().strip()
1230 retdata = p.stdout.read().strip()
1231 # wait for the child to exit to avoid race condition.
1231 # wait for the child to exit to avoid race condition.
1232 p.wait()
1232 p.wait()
1233
1233
1234 if p.returncode != 0 and p.returncode != 1:
1234 if p.returncode != 0 and p.returncode != 1:
1235 # there are certain error codes that are ok
1235 # there are certain error codes that are ok
1236 command = commands[0]
1236 command = commands[0]
1237 if command in ('cat-file', 'symbolic-ref'):
1237 if command in ('cat-file', 'symbolic-ref'):
1238 return retdata, p.returncode
1238 return retdata, p.returncode
1239 # for all others, abort
1239 # for all others, abort
1240 raise util.Abort('git %s error %d in %s' %
1240 raise util.Abort('git %s error %d in %s' %
1241 (command, p.returncode, self._relpath))
1241 (command, p.returncode, self._relpath))
1242
1242
1243 return retdata, p.returncode
1243 return retdata, p.returncode
1244
1244
1245 def _gitmissing(self):
1245 def _gitmissing(self):
1246 return not os.path.exists(os.path.join(self._abspath, '.git'))
1246 return not os.path.exists(os.path.join(self._abspath, '.git'))
1247
1247
1248 def _gitstate(self):
1248 def _gitstate(self):
1249 return self._gitcommand(['rev-parse', 'HEAD'])
1249 return self._gitcommand(['rev-parse', 'HEAD'])
1250
1250
1251 def _gitcurrentbranch(self):
1251 def _gitcurrentbranch(self):
1252 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1252 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1253 if err:
1253 if err:
1254 current = None
1254 current = None
1255 return current
1255 return current
1256
1256
1257 def _gitremote(self, remote):
1257 def _gitremote(self, remote):
1258 out = self._gitcommand(['remote', 'show', '-n', remote])
1258 out = self._gitcommand(['remote', 'show', '-n', remote])
1259 line = out.split('\n')[1]
1259 line = out.split('\n')[1]
1260 i = line.index('URL: ') + len('URL: ')
1260 i = line.index('URL: ') + len('URL: ')
1261 return line[i:]
1261 return line[i:]
1262
1262
1263 def _githavelocally(self, revision):
1263 def _githavelocally(self, revision):
1264 out, code = self._gitdir(['cat-file', '-e', revision])
1264 out, code = self._gitdir(['cat-file', '-e', revision])
1265 return code == 0
1265 return code == 0
1266
1266
1267 def _gitisancestor(self, r1, r2):
1267 def _gitisancestor(self, r1, r2):
1268 base = self._gitcommand(['merge-base', r1, r2])
1268 base = self._gitcommand(['merge-base', r1, r2])
1269 return base == r1
1269 return base == r1
1270
1270
1271 def _gitisbare(self):
1271 def _gitisbare(self):
1272 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1272 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1273
1273
1274 def _gitupdatestat(self):
1274 def _gitupdatestat(self):
1275 """This must be run before git diff-index.
1275 """This must be run before git diff-index.
1276 diff-index only looks at changes to file stat;
1276 diff-index only looks at changes to file stat;
1277 this command looks at file contents and updates the stat."""
1277 this command looks at file contents and updates the stat."""
1278 self._gitcommand(['update-index', '-q', '--refresh'])
1278 self._gitcommand(['update-index', '-q', '--refresh'])
1279
1279
1280 def _gitbranchmap(self):
1280 def _gitbranchmap(self):
1281 '''returns 2 things:
1281 '''returns 2 things:
1282 a map from git branch to revision
1282 a map from git branch to revision
1283 a map from revision to branches'''
1283 a map from revision to branches'''
1284 branch2rev = {}
1284 branch2rev = {}
1285 rev2branch = {}
1285 rev2branch = {}
1286
1286
1287 out = self._gitcommand(['for-each-ref', '--format',
1287 out = self._gitcommand(['for-each-ref', '--format',
1288 '%(objectname) %(refname)'])
1288 '%(objectname) %(refname)'])
1289 for line in out.split('\n'):
1289 for line in out.split('\n'):
1290 revision, ref = line.split(' ')
1290 revision, ref = line.split(' ')
1291 if (not ref.startswith('refs/heads/') and
1291 if (not ref.startswith('refs/heads/') and
1292 not ref.startswith('refs/remotes/')):
1292 not ref.startswith('refs/remotes/')):
1293 continue
1293 continue
1294 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1294 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1295 continue # ignore remote/HEAD redirects
1295 continue # ignore remote/HEAD redirects
1296 branch2rev[ref] = revision
1296 branch2rev[ref] = revision
1297 rev2branch.setdefault(revision, []).append(ref)
1297 rev2branch.setdefault(revision, []).append(ref)
1298 return branch2rev, rev2branch
1298 return branch2rev, rev2branch
1299
1299
1300 def _gittracking(self, branches):
1300 def _gittracking(self, branches):
1301 'return map of remote branch to local tracking branch'
1301 'return map of remote branch to local tracking branch'
1302 # assumes no more than one local tracking branch for each remote
1302 # assumes no more than one local tracking branch for each remote
1303 tracking = {}
1303 tracking = {}
1304 for b in branches:
1304 for b in branches:
1305 if b.startswith('refs/remotes/'):
1305 if b.startswith('refs/remotes/'):
1306 continue
1306 continue
1307 bname = b.split('/', 2)[2]
1307 bname = b.split('/', 2)[2]
1308 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1308 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1309 if remote:
1309 if remote:
1310 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1310 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1311 tracking['refs/remotes/%s/%s' %
1311 tracking['refs/remotes/%s/%s' %
1312 (remote, ref.split('/', 2)[2])] = b
1312 (remote, ref.split('/', 2)[2])] = b
1313 return tracking
1313 return tracking
1314
1314
1315 def _abssource(self, source):
1315 def _abssource(self, source):
1316 if '://' not in source:
1316 if '://' not in source:
1317 # recognize the scp syntax as an absolute source
1317 # recognize the scp syntax as an absolute source
1318 colon = source.find(':')
1318 colon = source.find(':')
1319 if colon != -1 and '/' not in source[:colon]:
1319 if colon != -1 and '/' not in source[:colon]:
1320 return source
1320 return source
1321 self._subsource = source
1321 self._subsource = source
1322 return _abssource(self)
1322 return _abssource(self)
1323
1323
1324 def _fetch(self, source, revision):
1324 def _fetch(self, source, revision):
1325 if self._gitmissing():
1325 if self._gitmissing():
1326 source = self._abssource(source)
1326 source = self._abssource(source)
1327 self.ui.status(_('cloning subrepo %s from %s\n') %
1327 self.ui.status(_('cloning subrepo %s from %s\n') %
1328 (self._relpath, source))
1328 (self._relpath, source))
1329 self._gitnodir(['clone', source, self._abspath])
1329 self._gitnodir(['clone', source, self._abspath])
1330 if self._githavelocally(revision):
1330 if self._githavelocally(revision):
1331 return
1331 return
1332 self.ui.status(_('pulling subrepo %s from %s\n') %
1332 self.ui.status(_('pulling subrepo %s from %s\n') %
1333 (self._relpath, self._gitremote('origin')))
1333 (self._relpath, self._gitremote('origin')))
1334 # try only origin: the originally cloned repo
1334 # try only origin: the originally cloned repo
1335 self._gitcommand(['fetch'])
1335 self._gitcommand(['fetch'])
1336 if not self._githavelocally(revision):
1336 if not self._githavelocally(revision):
1337 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1337 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1338 (revision, self._relpath))
1338 (revision, self._relpath))
1339
1339
1340 @annotatesubrepoerror
1340 @annotatesubrepoerror
1341 def dirty(self, ignoreupdate=False):
1341 def dirty(self, ignoreupdate=False):
1342 if self._gitmissing():
1342 if self._gitmissing():
1343 return self._state[1] != ''
1343 return self._state[1] != ''
1344 if self._gitisbare():
1344 if self._gitisbare():
1345 return True
1345 return True
1346 if not ignoreupdate and self._state[1] != self._gitstate():
1346 if not ignoreupdate and self._state[1] != self._gitstate():
1347 # different version checked out
1347 # different version checked out
1348 return True
1348 return True
1349 # check for staged changes or modified files; ignore untracked files
1349 # check for staged changes or modified files; ignore untracked files
1350 self._gitupdatestat()
1350 self._gitupdatestat()
1351 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1351 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1352 return code == 1
1352 return code == 1
1353
1353
1354 def basestate(self):
1354 def basestate(self):
1355 return self._gitstate()
1355 return self._gitstate()
1356
1356
1357 @annotatesubrepoerror
1357 @annotatesubrepoerror
1358 def get(self, state, overwrite=False):
1358 def get(self, state, overwrite=False):
1359 source, revision, kind = state
1359 source, revision, kind = state
1360 if not revision:
1360 if not revision:
1361 self.remove()
1361 self.remove()
1362 return
1362 return
1363 self._fetch(source, revision)
1363 self._fetch(source, revision)
1364 # if the repo was set to be bare, unbare it
1364 # if the repo was set to be bare, unbare it
1365 if self._gitisbare():
1365 if self._gitisbare():
1366 self._gitcommand(['config', 'core.bare', 'false'])
1366 self._gitcommand(['config', 'core.bare', 'false'])
1367 if self._gitstate() == revision:
1367 if self._gitstate() == revision:
1368 self._gitcommand(['reset', '--hard', 'HEAD'])
1368 self._gitcommand(['reset', '--hard', 'HEAD'])
1369 return
1369 return
1370 elif self._gitstate() == revision:
1370 elif self._gitstate() == revision:
1371 if overwrite:
1371 if overwrite:
1372 # first reset the index to unmark new files for commit, because
1372 # first reset the index to unmark new files for commit, because
1373 # reset --hard will otherwise throw away files added for commit,
1373 # reset --hard will otherwise throw away files added for commit,
1374 # not just unmark them.
1374 # not just unmark them.
1375 self._gitcommand(['reset', 'HEAD'])
1375 self._gitcommand(['reset', 'HEAD'])
1376 self._gitcommand(['reset', '--hard', 'HEAD'])
1376 self._gitcommand(['reset', '--hard', 'HEAD'])
1377 return
1377 return
1378 branch2rev, rev2branch = self._gitbranchmap()
1378 branch2rev, rev2branch = self._gitbranchmap()
1379
1379
1380 def checkout(args):
1380 def checkout(args):
1381 cmd = ['checkout']
1381 cmd = ['checkout']
1382 if overwrite:
1382 if overwrite:
1383 # first reset the index to unmark new files for commit, because
1383 # first reset the index to unmark new files for commit, because
1384 # the -f option will otherwise throw away files added for
1384 # the -f option will otherwise throw away files added for
1385 # commit, not just unmark them.
1385 # commit, not just unmark them.
1386 self._gitcommand(['reset', 'HEAD'])
1386 self._gitcommand(['reset', 'HEAD'])
1387 cmd.append('-f')
1387 cmd.append('-f')
1388 self._gitcommand(cmd + args)
1388 self._gitcommand(cmd + args)
1389 _sanitize(self.ui, self._abspath, '.git')
1389 _sanitize(self.ui, self._abspath, '.git')
1390
1390
1391 def rawcheckout():
1391 def rawcheckout():
1392 # no branch to checkout, check it out with no branch
1392 # no branch to checkout, check it out with no branch
1393 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1393 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1394 self._relpath)
1394 self._relpath)
1395 self.ui.warn(_('check out a git branch if you intend '
1395 self.ui.warn(_('check out a git branch if you intend '
1396 'to make changes\n'))
1396 'to make changes\n'))
1397 checkout(['-q', revision])
1397 checkout(['-q', revision])
1398
1398
1399 if revision not in rev2branch:
1399 if revision not in rev2branch:
1400 rawcheckout()
1400 rawcheckout()
1401 return
1401 return
1402 branches = rev2branch[revision]
1402 branches = rev2branch[revision]
1403 firstlocalbranch = None
1403 firstlocalbranch = None
1404 for b in branches:
1404 for b in branches:
1405 if b == 'refs/heads/master':
1405 if b == 'refs/heads/master':
1406 # master trumps all other branches
1406 # master trumps all other branches
1407 checkout(['refs/heads/master'])
1407 checkout(['refs/heads/master'])
1408 return
1408 return
1409 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1409 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1410 firstlocalbranch = b
1410 firstlocalbranch = b
1411 if firstlocalbranch:
1411 if firstlocalbranch:
1412 checkout([firstlocalbranch])
1412 checkout([firstlocalbranch])
1413 return
1413 return
1414
1414
1415 tracking = self._gittracking(branch2rev.keys())
1415 tracking = self._gittracking(branch2rev.keys())
1416 # choose a remote branch already tracked if possible
1416 # choose a remote branch already tracked if possible
1417 remote = branches[0]
1417 remote = branches[0]
1418 if remote not in tracking:
1418 if remote not in tracking:
1419 for b in branches:
1419 for b in branches:
1420 if b in tracking:
1420 if b in tracking:
1421 remote = b
1421 remote = b
1422 break
1422 break
1423
1423
1424 if remote not in tracking:
1424 if remote not in tracking:
1425 # create a new local tracking branch
1425 # create a new local tracking branch
1426 local = remote.split('/', 3)[3]
1426 local = remote.split('/', 3)[3]
1427 checkout(['-b', local, remote])
1427 checkout(['-b', local, remote])
1428 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1428 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1429 # When updating to a tracked remote branch,
1429 # When updating to a tracked remote branch,
1430 # if the local tracking branch is downstream of it,
1430 # if the local tracking branch is downstream of it,
1431 # a normal `git pull` would have performed a "fast-forward merge"
1431 # a normal `git pull` would have performed a "fast-forward merge"
1432 # which is equivalent to updating the local branch to the remote.
1432 # which is equivalent to updating the local branch to the remote.
1433 # Since we are only looking at branching at update, we need to
1433 # Since we are only looking at branching at update, we need to
1434 # detect this situation and perform this action lazily.
1434 # detect this situation and perform this action lazily.
1435 if tracking[remote] != self._gitcurrentbranch():
1435 if tracking[remote] != self._gitcurrentbranch():
1436 checkout([tracking[remote]])
1436 checkout([tracking[remote]])
1437 self._gitcommand(['merge', '--ff', remote])
1437 self._gitcommand(['merge', '--ff', remote])
1438 _sanitize(self.ui, self._abspath, '.git')
1438 _sanitize(self.ui, self._abspath, '.git')
1439 else:
1439 else:
1440 # a real merge would be required, just checkout the revision
1440 # a real merge would be required, just checkout the revision
1441 rawcheckout()
1441 rawcheckout()
1442
1442
1443 @annotatesubrepoerror
1443 @annotatesubrepoerror
1444 def commit(self, text, user, date):
1444 def commit(self, text, user, date):
1445 if self._gitmissing():
1445 if self._gitmissing():
1446 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1446 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1447 cmd = ['commit', '-a', '-m', text]
1447 cmd = ['commit', '-a', '-m', text]
1448 env = os.environ.copy()
1448 env = os.environ.copy()
1449 if user:
1449 if user:
1450 cmd += ['--author', user]
1450 cmd += ['--author', user]
1451 if date:
1451 if date:
1452 # git's date parser silently ignores when seconds < 1e9
1452 # git's date parser silently ignores when seconds < 1e9
1453 # convert to ISO8601
1453 # convert to ISO8601
1454 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1454 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1455 '%Y-%m-%dT%H:%M:%S %1%2')
1455 '%Y-%m-%dT%H:%M:%S %1%2')
1456 self._gitcommand(cmd, env=env)
1456 self._gitcommand(cmd, env=env)
1457 # make sure commit works otherwise HEAD might not exist under certain
1457 # make sure commit works otherwise HEAD might not exist under certain
1458 # circumstances
1458 # circumstances
1459 return self._gitstate()
1459 return self._gitstate()
1460
1460
1461 @annotatesubrepoerror
1461 @annotatesubrepoerror
1462 def merge(self, state):
1462 def merge(self, state):
1463 source, revision, kind = state
1463 source, revision, kind = state
1464 self._fetch(source, revision)
1464 self._fetch(source, revision)
1465 base = self._gitcommand(['merge-base', revision, self._state[1]])
1465 base = self._gitcommand(['merge-base', revision, self._state[1]])
1466 self._gitupdatestat()
1466 self._gitupdatestat()
1467 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1467 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1468
1468
1469 def mergefunc():
1469 def mergefunc():
1470 if base == revision:
1470 if base == revision:
1471 self.get(state) # fast forward merge
1471 self.get(state) # fast forward merge
1472 elif base != self._state[1]:
1472 elif base != self._state[1]:
1473 self._gitcommand(['merge', '--no-commit', revision])
1473 self._gitcommand(['merge', '--no-commit', revision])
1474 _sanitize(self.ui, self._abspath, '.git')
1474 _sanitize(self.ui, self._abspath, '.git')
1475
1475
1476 if self.dirty():
1476 if self.dirty():
1477 if self._gitstate() != revision:
1477 if self._gitstate() != revision:
1478 dirty = self._gitstate() == self._state[1] or code != 0
1478 dirty = self._gitstate() == self._state[1] or code != 0
1479 if _updateprompt(self.ui, self, dirty,
1479 if _updateprompt(self.ui, self, dirty,
1480 self._state[1][:7], revision[:7]):
1480 self._state[1][:7], revision[:7]):
1481 mergefunc()
1481 mergefunc()
1482 else:
1482 else:
1483 mergefunc()
1483 mergefunc()
1484
1484
1485 @annotatesubrepoerror
1485 @annotatesubrepoerror
1486 def push(self, opts):
1486 def push(self, opts):
1487 force = opts.get('force')
1487 force = opts.get('force')
1488
1488
1489 if not self._state[1]:
1489 if not self._state[1]:
1490 return True
1490 return True
1491 if self._gitmissing():
1491 if self._gitmissing():
1492 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1492 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1493 # if a branch in origin contains the revision, nothing to do
1493 # if a branch in origin contains the revision, nothing to do
1494 branch2rev, rev2branch = self._gitbranchmap()
1494 branch2rev, rev2branch = self._gitbranchmap()
1495 if self._state[1] in rev2branch:
1495 if self._state[1] in rev2branch:
1496 for b in rev2branch[self._state[1]]:
1496 for b in rev2branch[self._state[1]]:
1497 if b.startswith('refs/remotes/origin/'):
1497 if b.startswith('refs/remotes/origin/'):
1498 return True
1498 return True
1499 for b, revision in branch2rev.iteritems():
1499 for b, revision in branch2rev.iteritems():
1500 if b.startswith('refs/remotes/origin/'):
1500 if b.startswith('refs/remotes/origin/'):
1501 if self._gitisancestor(self._state[1], revision):
1501 if self._gitisancestor(self._state[1], revision):
1502 return True
1502 return True
1503 # otherwise, try to push the currently checked out branch
1503 # otherwise, try to push the currently checked out branch
1504 cmd = ['push']
1504 cmd = ['push']
1505 if force:
1505 if force:
1506 cmd.append('--force')
1506 cmd.append('--force')
1507
1507
1508 current = self._gitcurrentbranch()
1508 current = self._gitcurrentbranch()
1509 if current:
1509 if current:
1510 # determine if the current branch is even useful
1510 # determine if the current branch is even useful
1511 if not self._gitisancestor(self._state[1], current):
1511 if not self._gitisancestor(self._state[1], current):
1512 self.ui.warn(_('unrelated git branch checked out '
1512 self.ui.warn(_('unrelated git branch checked out '
1513 'in subrepo %s\n') % self._relpath)
1513 'in subrepo %s\n') % self._relpath)
1514 return False
1514 return False
1515 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1515 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1516 (current.split('/', 2)[2], self._relpath))
1516 (current.split('/', 2)[2], self._relpath))
1517 ret = self._gitdir(cmd + ['origin', current])
1517 ret = self._gitdir(cmd + ['origin', current])
1518 return ret[1] == 0
1518 return ret[1] == 0
1519 else:
1519 else:
1520 self.ui.warn(_('no branch checked out in subrepo %s\n'
1520 self.ui.warn(_('no branch checked out in subrepo %s\n'
1521 'cannot push revision %s\n') %
1521 'cannot push revision %s\n') %
1522 (self._relpath, self._state[1]))
1522 (self._relpath, self._state[1]))
1523 return False
1523 return False
1524
1524
1525 @annotatesubrepoerror
1525 @annotatesubrepoerror
1526 def remove(self):
1526 def remove(self):
1527 if self._gitmissing():
1527 if self._gitmissing():
1528 return
1528 return
1529 if self.dirty():
1529 if self.dirty():
1530 self.ui.warn(_('not removing repo %s because '
1530 self.ui.warn(_('not removing repo %s because '
1531 'it has changes.\n') % self._relpath)
1531 'it has changes.\n') % self._relpath)
1532 return
1532 return
1533 # we can't fully delete the repository as it may contain
1533 # we can't fully delete the repository as it may contain
1534 # local-only history
1534 # local-only history
1535 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1535 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1536 self._gitcommand(['config', 'core.bare', 'true'])
1536 self._gitcommand(['config', 'core.bare', 'true'])
1537 for f in os.listdir(self._abspath):
1537 for f in os.listdir(self._abspath):
1538 if f == '.git':
1538 if f == '.git':
1539 continue
1539 continue
1540 path = os.path.join(self._abspath, f)
1540 path = os.path.join(self._abspath, f)
1541 if os.path.isdir(path) and not os.path.islink(path):
1541 if os.path.isdir(path) and not os.path.islink(path):
1542 shutil.rmtree(path)
1542 shutil.rmtree(path)
1543 else:
1543 else:
1544 os.remove(path)
1544 os.remove(path)
1545
1545
1546 def archive(self, archiver, prefix, match=None):
1546 def archive(self, archiver, prefix, match=None):
1547 total = 0
1547 total = 0
1548 source, revision = self._state
1548 source, revision = self._state
1549 if not revision:
1549 if not revision:
1550 return total
1550 return total
1551 self._fetch(source, revision)
1551 self._fetch(source, revision)
1552
1552
1553 # Parse git's native archive command.
1553 # Parse git's native archive command.
1554 # This should be much faster than manually traversing the trees
1554 # This should be much faster than manually traversing the trees
1555 # and objects with many subprocess calls.
1555 # and objects with many subprocess calls.
1556 tarstream = self._gitcommand(['archive', revision], stream=True)
1556 tarstream = self._gitcommand(['archive', revision], stream=True)
1557 tar = tarfile.open(fileobj=tarstream, mode='r|')
1557 tar = tarfile.open(fileobj=tarstream, mode='r|')
1558 relpath = subrelpath(self)
1558 relpath = subrelpath(self)
1559 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1559 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1560 for i, info in enumerate(tar):
1560 for i, info in enumerate(tar):
1561 if info.isdir():
1561 if info.isdir():
1562 continue
1562 continue
1563 if match and not match(info.name):
1563 if match and not match(info.name):
1564 continue
1564 continue
1565 if info.issym():
1565 if info.issym():
1566 data = info.linkname
1566 data = info.linkname
1567 else:
1567 else:
1568 data = tar.extractfile(info).read()
1568 data = tar.extractfile(info).read()
1569 archiver.addfile(os.path.join(prefix, self._path, info.name),
1569 archiver.addfile(os.path.join(prefix, self._path, info.name),
1570 info.mode, info.issym(), data)
1570 info.mode, info.issym(), data)
1571 total += 1
1571 total += 1
1572 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1572 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1573 unit=_('files'))
1573 unit=_('files'))
1574 self.ui.progress(_('archiving (%s)') % relpath, None)
1574 self.ui.progress(_('archiving (%s)') % relpath, None)
1575 return total
1575 return total
1576
1576
1577
1577
1578 @annotatesubrepoerror
1578 @annotatesubrepoerror
1579 def status(self, rev2, **opts):
1579 def status(self, rev2, **opts):
1580 rev1 = self._state[1]
1580 rev1 = self._state[1]
1581 if self._gitmissing() or not rev1:
1581 if self._gitmissing() or not rev1:
1582 # if the repo is missing, return no results
1582 # if the repo is missing, return no results
1583 return [], [], [], [], [], [], []
1583 return [], [], [], [], [], [], []
1584 modified, added, removed = [], [], []
1584 modified, added, removed = [], [], []
1585 self._gitupdatestat()
1585 self._gitupdatestat()
1586 if rev2:
1586 if rev2:
1587 command = ['diff-tree', rev1, rev2]
1587 command = ['diff-tree', rev1, rev2]
1588 else:
1588 else:
1589 command = ['diff-index', rev1]
1589 command = ['diff-index', rev1]
1590 out = self._gitcommand(command)
1590 out = self._gitcommand(command)
1591 for line in out.split('\n'):
1591 for line in out.split('\n'):
1592 tab = line.find('\t')
1592 tab = line.find('\t')
1593 if tab == -1:
1593 if tab == -1:
1594 continue
1594 continue
1595 status, f = line[tab - 1], line[tab + 1:]
1595 status, f = line[tab - 1], line[tab + 1:]
1596 if status == 'M':
1596 if status == 'M':
1597 modified.append(f)
1597 modified.append(f)
1598 elif status == 'A':
1598 elif status == 'A':
1599 added.append(f)
1599 added.append(f)
1600 elif status == 'D':
1600 elif status == 'D':
1601 removed.append(f)
1601 removed.append(f)
1602
1602
1603 deleted, unknown, ignored, clean = [], [], [], []
1603 deleted, unknown, ignored, clean = [], [], [], []
1604
1604
1605 if not rev2:
1605 if not rev2:
1606 command = ['ls-files', '--others', '--exclude-standard']
1606 command = ['ls-files', '--others', '--exclude-standard']
1607 out = self._gitcommand(command)
1607 out = self._gitcommand(command)
1608 for line in out.split('\n'):
1608 for line in out.split('\n'):
1609 if len(line) == 0:
1609 if len(line) == 0:
1610 continue
1610 continue
1611 unknown.append(line)
1611 unknown.append(line)
1612
1612
1613 return scmutil.status(modified, added, removed, deleted,
1613 return scmutil.status(modified, added, removed, deleted,
1614 unknown, ignored, clean)
1614 unknown, ignored, clean)
1615
1615
1616 @annotatesubrepoerror
1616 @annotatesubrepoerror
1617 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1617 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1618 node1 = self._state[1]
1618 node1 = self._state[1]
1619 cmd = ['diff']
1619 cmd = ['diff']
1620 if opts['stat']:
1620 if opts['stat']:
1621 cmd.append('--stat')
1621 cmd.append('--stat')
1622 else:
1622 else:
1623 # for Git, this also implies '-p'
1623 # for Git, this also implies '-p'
1624 cmd.append('-U%d' % diffopts.context)
1624 cmd.append('-U%d' % diffopts.context)
1625
1625
1626 gitprefix = os.path.join(prefix, self._path)
1626 gitprefix = os.path.join(prefix, self._path)
1627
1627
1628 if diffopts.noprefix:
1628 if diffopts.noprefix:
1629 cmd.extend(['--src-prefix=%s/' % gitprefix,
1629 cmd.extend(['--src-prefix=%s/' % gitprefix,
1630 '--dst-prefix=%s/' % gitprefix])
1630 '--dst-prefix=%s/' % gitprefix])
1631 else:
1631 else:
1632 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1632 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1633 '--dst-prefix=b/%s/' % gitprefix])
1633 '--dst-prefix=b/%s/' % gitprefix])
1634
1634
1635 if diffopts.ignorews:
1635 if diffopts.ignorews:
1636 cmd.append('--ignore-all-space')
1636 cmd.append('--ignore-all-space')
1637 if diffopts.ignorewsamount:
1637 if diffopts.ignorewsamount:
1638 cmd.append('--ignore-space-change')
1638 cmd.append('--ignore-space-change')
1639 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1639 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1640 and diffopts.ignoreblanklines:
1640 and diffopts.ignoreblanklines:
1641 cmd.append('--ignore-blank-lines')
1641 cmd.append('--ignore-blank-lines')
1642
1642
1643 cmd.append(node1)
1643 cmd.append(node1)
1644 if node2:
1644 if node2:
1645 cmd.append(node2)
1645 cmd.append(node2)
1646
1646
1647 if match.anypats():
1647 if match.anypats():
1648 return #No support for include/exclude yet
1648 return #No support for include/exclude yet
1649
1649
1650 if match.always():
1650 if match.always():
1651 ui.write(self._gitcommand(cmd))
1651 ui.write(self._gitcommand(cmd))
1652 elif match.files():
1652 elif match.files():
1653 for f in match.files():
1653 for f in match.files():
1654 ui.write(self._gitcommand(cmd + [f]))
1654 ui.write(self._gitcommand(cmd + [f]))
1655 elif match(gitprefix): #Subrepo is matched
1655 elif match(gitprefix): #Subrepo is matched
1656 ui.write(self._gitcommand(cmd))
1656 ui.write(self._gitcommand(cmd))
1657
1657
1658 def revert(self, substate, *pats, **opts):
1658 def revert(self, substate, *pats, **opts):
1659 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1659 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1660 if not opts.get('no_backup'):
1660 if not opts.get('no_backup'):
1661 self.ui.warn('%s: reverting %s subrepos without '
1661 self.ui.warn('%s: reverting %s subrepos without '
1662 '--no-backup is unsupported\n'
1662 '--no-backup is unsupported\n'
1663 % (substate[0], substate[2]))
1663 % (substate[0], substate[2]))
1664 return []
1664 return []
1665
1665
1666 self.get(substate, overwrite=True)
1666 self.get(substate, overwrite=True)
1667 return []
1667 return []
1668
1668
1669 def shortid(self, revid):
1669 def shortid(self, revid):
1670 return revid[:7]
1670 return revid[:7]
1671
1671
# mapping from subrepo kind (as recorded in .hgsub entries) to the
# class implementing that kind of subrepository
types = dict(
    hg=hgsubrepo,
    svn=svnsubrepo,
    git=gitsubrepo,
)
General Comments 0
You need to be logged in to leave comments. Login now