grammar: use does instead of do where appropriate

Author: timeless@mozdev.org
Changeset: r26779:aaa33ec3 (branch: default)
--- a/hgext/convert/bzr.py
+++ b/hgext/convert/bzr.py
@@ -1,287 +1,287 @@
# bzr.py - bzr support for the convert extension
#
# Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# This module is for handling 'bzr', that was formerly known as Bazaar-NG;
# it cannot access 'bar' repositories, but they were never used very much

import os
from mercurial import demandimport, error
# these do not work with demandimport, blacklist
demandimport.ignore.extend([
        'bzrlib.transactions',
        'bzrlib.urlutils',
        'ElementPath',
    ])

from mercurial.i18n import _
from mercurial import error
from common import NoRepo, commit, converter_source

try:
    # bazaar imports
    from bzrlib import bzrdir, revision, errors
    from bzrlib.revisionspec import RevisionSpec
except ImportError:
    pass

supportedkinds = ('file', 'symlink')

class bzr_source(converter_source):
    """Reads Bazaar repositories by using the Bazaar Python libraries"""

    def __init__(self, ui, path, revs=None):
        super(bzr_source, self).__init__(ui, path, revs=revs)

        if not os.path.exists(os.path.join(path, '.bzr')):
            raise NoRepo(_('%s does not look like a Bazaar repository')
                         % path)

        try:
            # access bzrlib stuff
            bzrdir
        except NameError:
            raise NoRepo(_('Bazaar modules could not be loaded'))

        path = os.path.abspath(path)
        self._checkrepotype(path)
        try:
            self.sourcerepo = bzrdir.BzrDir.open(path).open_repository()
        except errors.NoRepositoryPresent:
            raise NoRepo(_('%s does not look like a Bazaar repository')
                         % path)
        self._parentids = {}

    def _checkrepotype(self, path):
        # Lightweight checkouts detection is informational but probably
        # fragile at API level. It should not terminate the conversion.
        try:
            from bzrlib import bzrdir
            dir = bzrdir.BzrDir.open_containing(path)[0]
            try:
                tree = dir.open_workingtree(recommend_upgrade=False)
                branch = tree.branch
            except (errors.NoWorkingTree, errors.NotLocalUrl):
                tree = None
                branch = dir.open_branch()
            if (tree is not None and tree.bzrdir.root_transport.base !=
                branch.bzrdir.root_transport.base):
                self.ui.warn(_('warning: lightweight checkouts may cause '
                               'conversion failures, try with a regular '
                               'branch instead.\n'))
        except Exception:
            self.ui.note(_('bzr source type could not be determined\n'))

    def before(self):
        """Before the conversion begins, acquire a read lock
        for all the operations that might need it. Fortunately
        read locks don't block other reads or writes to the
        repository, so this shouldn't have any impact on the usage of
        the source repository.

        The alternative would be locking on every operation that
        needs locks (there are currently two: getting the file and
        getting the parent map) and releasing immediately after,
        but this approach can take even 40% longer."""
        self.sourcerepo.lock_read()

    def after(self):
        self.sourcerepo.unlock()

    def _bzrbranches(self):
        return self.sourcerepo.find_branches(using=True)

    def getheads(self):
        if not self.revs:
            # Set using=True to avoid nested repositories (see issue3254)
            heads = sorted([b.last_revision() for b in self._bzrbranches()])
        else:
            revid = None
            for branch in self._bzrbranches():
                try:
                    r = RevisionSpec.from_string(self.revs[0])
                    info = r.in_history(branch)
                except errors.BzrError:
                    pass
                revid = info.rev_id
            if revid is None:
                raise error.Abort(_('%s is not a valid revision')
                                  % self.revs[0])
            heads = [revid]
        # Empty repositories return 'null:', which cannot be retrieved
        heads = [h for h in heads if h != 'null:']
        return heads

    def getfile(self, name, rev):
        revtree = self.sourcerepo.revision_tree(rev)
        fileid = revtree.path2id(name.decode(self.encoding or 'utf-8'))
        kind = None
        if fileid is not None:
            kind = revtree.kind(fileid)
        if kind not in supportedkinds:
            # the file is not available anymore - was deleted
            return None, None
        mode = self._modecache[(name, rev)]
        if kind == 'symlink':
            target = revtree.get_symlink_target(fileid)
            if target is None:
                raise error.Abort(_('%s.%s symlink has no target')
                                  % (name, rev))
            return target, mode
        else:
            sio = revtree.get_file(fileid)
            return sio.read(), mode

    def getchanges(self, version, full):
        if full:
-            raise error.Abort(_("convert from cvs do not support --full"))
+            raise error.Abort(_("convert from cvs does not support --full"))
        self._modecache = {}
        self._revtree = self.sourcerepo.revision_tree(version)
        # get the parentids from the cache
        parentids = self._parentids.pop(version)
        # only diff against first parent id
        prevtree = self.sourcerepo.revision_tree(parentids[0])
        files, changes = self._gettreechanges(self._revtree, prevtree)
        return files, changes, set()

    def getcommit(self, version):
        rev = self.sourcerepo.get_revision(version)
        # populate parent id cache
        if not rev.parent_ids:
            parents = []
            self._parentids[version] = (revision.NULL_REVISION,)
        else:
            parents = self._filterghosts(rev.parent_ids)
            self._parentids[version] = parents

        branch = self.recode(rev.properties.get('branch-nick', u'default'))
        if branch == 'trunk':
            branch = 'default'
        return commit(parents=parents,
                date='%d %d' % (rev.timestamp, -rev.timezone),
                author=self.recode(rev.committer),
                desc=self.recode(rev.message),
                branch=branch,
                rev=version)

    def gettags(self):
        bytetags = {}
        for branch in self._bzrbranches():
            if not branch.supports_tags():
                return {}
            tagdict = branch.tags.get_tag_dict()
            for name, rev in tagdict.iteritems():
                bytetags[self.recode(name)] = rev
        return bytetags

    def getchangedfiles(self, rev, i):
        self._modecache = {}
        curtree = self.sourcerepo.revision_tree(rev)
        if i is not None:
            parentid = self._parentids[rev][i]
        else:
            # no parent id, get the empty revision
            parentid = revision.NULL_REVISION

        prevtree = self.sourcerepo.revision_tree(parentid)
        changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]]
        return changes

    def _gettreechanges(self, current, origin):
        revid = current._revision_id
        changes = []
        renames = {}
        seen = set()
        # Process the entries by reverse lexicographic name order to
        # handle nested renames correctly, most specific first.
        curchanges = sorted(current.iter_changes(origin),
                            key=lambda c: c[1][0] or c[1][1],
                            reverse=True)
        for (fileid, paths, changed_content, versioned, parent, name,
            kind, executable) in curchanges:

            if paths[0] == u'' or paths[1] == u'':
                # ignore changes to tree root
                continue

            # bazaar tracks directories, mercurial does not, so
            # we have to rename the directory contents
            if kind[1] == 'directory':
                if kind[0] not in (None, 'directory'):
                    # Replacing 'something' with a directory, record it
                    # so it can be removed.
                    changes.append((self.recode(paths[0]), revid))

                if kind[0] == 'directory' and None not in paths:
                    renaming = paths[0] != paths[1]
                    # neither an add nor an delete - a move
                    # rename all directory contents manually
                    subdir = origin.inventory.path2id(paths[0])
                    # get all child-entries of the directory
                    for name, entry in origin.inventory.iter_entries(subdir):
                        # hg does not track directory renames
                        if entry.kind == 'directory':
                            continue
                        frompath = self.recode(paths[0] + '/' + name)
                        if frompath in seen:
                            # Already handled by a more specific change entry
                            # This is important when you have:
                            # a => b
                            # a/c => a/c
                            # Here a/c must not be renamed into b/c
                            continue
                        seen.add(frompath)
                        if not renaming:
                            continue
                        topath = self.recode(paths[1] + '/' + name)
                        # register the files as changed
                        changes.append((frompath, revid))
                        changes.append((topath, revid))
                        # add to mode cache
                        mode = ((entry.executable and 'x')
                                or (entry.kind == 'symlink' and 's')
                                or '')
                        self._modecache[(topath, revid)] = mode
                        # register the change as move
                        renames[topath] = frompath

                # no further changes, go to the next change
                continue

            # we got unicode paths, need to convert them
            path, topath = paths
            if path is not None:
                path = self.recode(path)
            if topath is not None:
                topath = self.recode(topath)
            seen.add(path or topath)

            if topath is None:
                # file deleted
                changes.append((path, revid))
                continue

            # renamed
            if path and path != topath:
                renames[topath] = path
                changes.append((path, revid))

            # populate the mode cache
            kind, executable = [e[1] for e in (kind, executable)]
            mode = ((executable and 'x') or (kind == 'symlink' and 'l')
                    or '')
            self._modecache[(topath, revid)] = mode
            changes.append((topath, revid))

        return changes, renames

    def _filterghosts(self, ids):
        """Filters out ghost revisions which hg does not support, see
        <http://bazaar-vcs.org/GhostRevision>
        """
        parentmap = self.sourcerepo.get_parent_map(ids)
        parents = tuple([parent for parent in ids if parent in parentmap])
        return parents
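
Editorial aside, not part of the changeset: the `self._modecache` dict that `_gettreechanges` fills and `getfile` reads maps `(path, revid)` to a one-character mode flag: `''` for a regular file, `'x'` for an executable, `'l'` for a symlink (the directory-rename branch above uses `'s'`). A minimal, self-contained sketch of that convention; the paths and revision id below are hypothetical:

    # Sketch only: modeflag mirrors the mode expression in _gettreechanges,
    # and the plain dict stands in for self._modecache.
    def modeflag(executable, kind):
        # executable wins over symlink; plain files get ''
        return (executable and 'x') or (kind == 'symlink' and 'l') or ''

    modecache = {}
    modecache[('bin/run.sh', 'rev1')] = modeflag(True, 'file')       # 'x'
    modecache[('docs/latest', 'rev1')] = modeflag(False, 'symlink')  # 'l'
    assert modecache[('docs/latest', 'rev1')] == 'l'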
--- a/hgext/convert/cvs.py
+++ b/hgext/convert/cvs.py
@@ -1,280 +1,280 @@
# cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import os, re, socket, errno
from cStringIO import StringIO
from mercurial import encoding, util, error
from mercurial.i18n import _

from common import NoRepo, commit, converter_source, checktool
from common import makedatetimestamp
import cvsps

class convert_cvs(converter_source):
    def __init__(self, ui, path, revs=None):
        super(convert_cvs, self).__init__(ui, path, revs=revs)

        cvs = os.path.join(path, "CVS")
        if not os.path.exists(cvs):
            raise NoRepo(_("%s does not look like a CVS checkout") % path)

        checktool('cvs')

        self.changeset = None
        self.files = {}
        self.tags = {}
        self.lastbranch = {}
        self.socket = None
        self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1]
        self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1]
        self.encoding = encoding.encoding

        self._connect()

    def _parse(self):
        if self.changeset is not None:
            return
        self.changeset = {}

        maxrev = 0
        if self.revs:
            if len(self.revs) > 1:
                raise error.Abort(_('cvs source does not support specifying '
                                   'multiple revs'))
            # TODO: handle tags
            try:
                # patchset number?
                maxrev = int(self.revs[0])
            except ValueError:
                raise error.Abort(_('revision %s is not a patchset number')
                                  % self.revs[0])

        d = os.getcwd()
        try:
            os.chdir(self.path)
            id = None

            cache = 'update'
            if not self.ui.configbool('convert', 'cvsps.cache', True):
                cache = None
            db = cvsps.createlog(self.ui, cache=cache)
            db = cvsps.createchangeset(self.ui, db,
                fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
                mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
                mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))

            for cs in db:
                if maxrev and cs.id > maxrev:
                    break
                id = str(cs.id)
                cs.author = self.recode(cs.author)
                self.lastbranch[cs.branch] = id
                cs.comment = self.recode(cs.comment)
                if self.ui.configbool('convert', 'localtimezone'):
                    cs.date = makedatetimestamp(cs.date[0])
                date = util.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
                self.tags.update(dict.fromkeys(cs.tags, id))

                files = {}
                for f in cs.entries:
                    files[f.file] = "%s%s" % ('.'.join([str(x)
                                                        for x in f.revision]),
                                              ['', '(DEAD)'][f.dead])

                # add current commit to set
                c = commit(author=cs.author, date=date,
                           parents=[str(p.id) for p in cs.parents],
                           desc=cs.comment, branch=cs.branch or '')
                self.changeset[id] = c
                self.files[id] = files

            self.heads = self.lastbranch.values()
        finally:
            os.chdir(d)

    def _connect(self):
        root = self.cvsroot
        conntype = None
        user, host = None, None
        cmd = ['cvs', 'server']

        self.ui.status(_("connecting to %s\n") % root)

        if root.startswith(":pserver:"):
            root = root[9:]
            m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
                         root)
            if m:
                conntype = "pserver"
                user, passw, serv, port, root = m.groups()
                if not user:
                    user = "anonymous"
                if not port:
                    port = 2401
                else:
                    port = int(port)
                format0 = ":pserver:%s@%s:%s" % (user, serv, root)
                format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)

                if not passw:
                    passw = "A"
                    cvspass = os.path.expanduser("~/.cvspass")
                    try:
                        pf = open(cvspass)
                        for line in pf.read().splitlines():
                            part1, part2 = line.split(' ', 1)
                            # /1 :pserver:user@example.com:2401/cvsroot/foo
                            # Ah<Z
                            if part1 == '/1':
                                part1, part2 = part2.split(' ', 1)
                                format = format1
                            # :pserver:user@example.com:/cvsroot/foo Ah<Z
                            else:
                                format = format0
                            if part1 == format:
                                passw = part2
                                break
                        pf.close()
                    except IOError as inst:
                        if inst.errno != errno.ENOENT:
                            if not getattr(inst, 'filename', None):
                                inst.filename = cvspass
                            raise

                sck = socket.socket()
                sck.connect((serv, port))
                sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                    "END AUTH REQUEST", ""]))
                if sck.recv(128) != "I LOVE YOU\n":
                    raise error.Abort(_("CVS pserver authentication failed"))

                self.writep = self.readp = sck.makefile('r+')

        if not conntype and root.startswith(":local:"):
            conntype = "local"
            root = root[7:]

        if not conntype:
            # :ext:user@host/home/user/path/to/cvsroot
            if root.startswith(":ext:"):
                root = root[5:]
            m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
            # Do not take Windows path "c:\foo\bar" for a connection strings
            if os.path.isdir(root) or not m:
                conntype = "local"
            else:
                conntype = "rsh"
                user, host, root = m.group(1), m.group(2), m.group(3)

        if conntype != "pserver":
            if conntype == "rsh":
                rsh = os.environ.get("CVS_RSH") or "ssh"
                if user:
                    cmd = [rsh, '-l', user, host] + cmd
                else:
                    cmd = [rsh, host] + cmd

            # popen2 does not support argument lists under Windows
            cmd = [util.shellquote(arg) for arg in cmd]
            cmd = util.quotecommand(' '.join(cmd))
            self.writep, self.readp = util.popen2(cmd)

        self.realroot = root

        self.writep.write("Root %s\n" % root)
        self.writep.write("Valid-responses ok error Valid-requests Mode"
                          " M Mbinary E Checked-in Created Updated"
                          " Merged Removed\n")
        self.writep.write("valid-requests\n")
        self.writep.flush()
        r = self.readp.readline()
        if not r.startswith("Valid-requests"):
            raise error.Abort(_('unexpected response from CVS server '
                               '(expected "Valid-requests", but got %r)')
                              % r)
        if "UseUnchanged" in r:
            self.writep.write("UseUnchanged\n")
            self.writep.flush()
            r = self.readp.readline()

    def getheads(self):
        self._parse()
        return self.heads

    def getfile(self, name, rev):

        def chunkedread(fp, count):
            # file-objects returned by socket.makefile() do not handle
            # large read() requests very well.
            chunksize = 65536
            output = StringIO()
            while count > 0:
                data = fp.read(min(count, chunksize))
                if not data:
                    raise error.Abort(_("%d bytes missing from remote file")
                                      % count)
                count -= len(data)
                output.write(data)
            return output.getvalue()

        self._parse()
        if rev.endswith("(DEAD)"):
            return None, None

        args = ("-N -P -kk -r %s --" % rev).split()
        args.append(self.cvsrepo + '/' + name)
        for x in args:
            self.writep.write("Argument %s\n" % x)
        self.writep.write("Directory .\n%s\nco\n" % self.realroot)
        self.writep.flush()

        data = ""
        mode = None
        while True:
            line = self.readp.readline()
            if line.startswith("Created ") or line.startswith("Updated "):
                self.readp.readline() # path
                self.readp.readline() # entries
                mode = self.readp.readline()[:-1]
                count = int(self.readp.readline()[:-1])
                data = chunkedread(self.readp, count)
            elif line.startswith(" "):
                data += line[1:]
            elif line.startswith("M "):
                pass
            elif line.startswith("Mbinary "):
                count = int(self.readp.readline()[:-1])
                data = chunkedread(self.readp, count)
            else:
                if line == "ok\n":
                    if mode is None:
                        raise error.Abort(_('malformed response from CVS'))
                    return (data, "x" in mode and "x" or "")
                elif line.startswith("E "):
                    self.ui.warn(_("cvs server: %s\n") % line[2:])
                elif line.startswith("Remove"):
                    self.readp.readline()
                else:
                    raise error.Abort(_("unknown CVS response: %s") % line)

    def getchanges(self, rev, full):
        if full:
-            raise error.Abort(_("convert from cvs do not support --full"))
+            raise error.Abort(_("convert from cvs does not support --full"))
        self._parse()
        return sorted(self.files[rev].iteritems()), {}, set()

    def getcommit(self, rev):
        self._parse()
        return self.changeset[rev]

    def gettags(self):
        self._parse()
        return self.tags

    def getchangedfiles(self, rev, i):
        self._parse()
        return sorted(self.files[rev])
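
Editorial aside, not part of the changeset: `_parse()` records each file's version as the CVS revision string with an optional `(DEAD)` suffix, which `getfile()` later uses to short-circuit deleted files to `(None, None)`. A tiny illustration of that encoding; the revision tuple and dead flag below are made up:

    # Sketch only: revision/dead stand in for a cvsps entry's fields.
    revision, dead = (1, 5), True
    rev = "%s%s" % ('.'.join([str(x) for x in revision]),
                    ['', '(DEAD)'][dead])
    assert rev == '1.5(DEAD)'   # getfile() treats this as a deleted file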
--- a/hgext/convert/darcs.py
+++ b/hgext/convert/darcs.py
@@ -1,208 +1,208 @@
# darcs.py - darcs support for the convert extension
#
# Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from common import NoRepo, checktool, commandline, commit, converter_source
from mercurial.i18n import _
from mercurial import util, error
import os, shutil, tempfile, re, errno

# The naming drift of ElementTree is fun!

try:
    from xml.etree.cElementTree import ElementTree, XMLParser
except ImportError:
    try:
        from xml.etree.ElementTree import ElementTree, XMLParser
    except ImportError:
        try:
            from elementtree.cElementTree import ElementTree, XMLParser
        except ImportError:
            try:
                from elementtree.ElementTree import ElementTree, XMLParser
            except ImportError:
                pass

class darcs_source(converter_source, commandline):
    def __init__(self, ui, path, revs=None):
        converter_source.__init__(self, ui, path, revs=revs)
        commandline.__init__(self, ui, 'darcs')

        # check for _darcs, ElementTree so that we can easily skip
        # test-convert-darcs if ElementTree is not around
        if not os.path.exists(os.path.join(path, '_darcs')):
            raise NoRepo(_("%s does not look like a darcs repository") % path)

        checktool('darcs')
        version = self.run0('--version').splitlines()[0].strip()
        if version < '2.1':
            raise error.Abort(_('darcs version 2.1 or newer needed (found %r)')
                              % version)

        if "ElementTree" not in globals():
            raise error.Abort(_("Python ElementTree module is not available"))

        self.path = os.path.realpath(path)

        self.lastrev = None
        self.changes = {}
        self.parents = {}
        self.tags = {}

        # Check darcs repository format
        format = self.format()
        if format:
            if format in ('darcs-1.0', 'hashed'):
                raise NoRepo(_("%s repository format is unsupported, "
                               "please upgrade") % format)
        else:
            self.ui.warn(_('failed to detect repository format!'))

    def before(self):
        self.tmppath = tempfile.mkdtemp(
            prefix='convert-' + os.path.basename(self.path) + '-')
        output, status = self.run('init', repodir=self.tmppath)
        self.checkexit(status)

        tree = self.xml('changes', xml_output=True, summary=True,
                        repodir=self.path)
        tagname = None
        child = None
        for elt in tree.findall('patch'):
            node = elt.get('hash')
            name = elt.findtext('name', '')
            if name.startswith('TAG '):
                tagname = name[4:].strip()
            elif tagname is not None:
                self.tags[tagname] = node
                tagname = None
            self.changes[node] = elt
            self.parents[child] = [node]
            child = node
        self.parents[child] = []

    def after(self):
        self.ui.debug('cleaning up %s\n' % self.tmppath)
        shutil.rmtree(self.tmppath, ignore_errors=True)

    def recode(self, s, encoding=None):
        if isinstance(s, unicode):
            # XMLParser returns unicode objects for anything it can't
            # encode into ASCII. We convert them back to str to get
            # recode's normal conversion behavior.
            s = s.encode('latin-1')
        return super(darcs_source, self).recode(s, encoding)

    def xml(self, cmd, **kwargs):
        # NOTE: darcs is currently encoding agnostic and will print
        # patch metadata byte-for-byte, even in the XML changelog.
        etree = ElementTree()
        # While we are decoding the XML as latin-1 to be as liberal as
        # possible, etree will still raise an exception if any
        # non-printable characters are in the XML changelog.
        parser = XMLParser(encoding='latin-1')
        p = self._run(cmd, **kwargs)
        etree.parse(p.stdout, parser=parser)
        p.wait()
        self.checkexit(p.returncode)
        return etree.getroot()

    def format(self):
        output, status = self.run('show', 'repo', no_files=True,
                                  repodir=self.path)
        self.checkexit(status)
        m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
        if not m:
            return None
        return ','.join(sorted(f.strip() for f in m.group(1).split(',')))

    def manifest(self):
        man = []
        output, status = self.run('show', 'files', no_directories=True,
                                  repodir=self.tmppath)
        self.checkexit(status)
        for line in output.split('\n'):
            path = line[2:]
            if path:
                man.append(path)
        return man

    def getheads(self):
        return self.parents[None]

    def getcommit(self, rev):
        elt = self.changes[rev]
        date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
        desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
        # etree can return unicode objects for name, comment, and author,
        # so recode() is used to ensure str objects are emitted.
        return commit(author=self.recode(elt.get('author')),
                      date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
                      desc=self.recode(desc).strip(),
                      parents=self.parents[rev])

    def pull(self, rev):
        output, status = self.run('pull', self.path, all=True,
                                  match='hash %s' % rev,
                                  no_test=True, no_posthook=True,
                                  external_merge='/bin/false',
                                  repodir=self.tmppath)
        if status:
            if output.find('We have conflicts in') == -1:
                self.checkexit(status, output)
            output, status = self.run('revert', all=True, repodir=self.tmppath)
            self.checkexit(status, output)

    def getchanges(self, rev, full):
        if full:
-            raise error.Abort(_("convert from darcs do not support --full"))
+            raise error.Abort(_("convert from darcs does not support --full"))
        copies = {}
        changes = []
        man = None
        for elt in self.changes[rev].find('summary').getchildren():
            if elt.tag in ('add_directory', 'remove_directory'):
                continue
            if elt.tag == 'move':
                if man is None:
                    man = self.manifest()
                source, dest = elt.get('from'), elt.get('to')
                if source in man:
                    # File move
                    changes.append((source, rev))
                    changes.append((dest, rev))
                    copies[dest] = source
                else:
                    # Directory move, deduce file moves from manifest
                    source = source + '/'
                    for f in man:
                        if not f.startswith(source):
                            continue
                        fdest = dest + '/' + f[len(source):]
                        changes.append((f, rev))
                        changes.append((fdest, rev))
                        copies[fdest] = f
            else:
                changes.append((elt.text.strip(), rev))
        self.pull(rev)
        self.lastrev = rev
        return sorted(changes), copies, set()

    def getfile(self, name, rev):
        if rev != self.lastrev:
            raise error.Abort(_('internal calling inconsistency'))
        path = os.path.join(self.tmppath, name)
        try:
            data = util.readfile(path)
            mode = os.lstat(path).st_mode
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                return None, None
            raise
        mode = (mode & 0o111) and 'x' or ''
        return data, mode

    def gettags(self):
        return self.tags
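
Editorial aside, not part of the changeset: `before()` derives tags from listing order. `darcs changes` lists patches newest first, and a patch whose name starts with `'TAG '` is only a marker; the tag resolves to the next patch in the listing. A self-contained sketch with made-up patch hashes:

    from xml.etree.ElementTree import fromstring

    # Hypothetical, abbreviated output of `darcs changes --xml-output`.
    xml = """<changelog>
      <patch hash='p2.gz'><name>TAG 1.0</name></patch>
      <patch hash='p1.gz'><name>initial import</name></patch>
    </changelog>"""

    tags, tagname = {}, None
    for elt in fromstring(xml).findall('patch'):
        name = elt.findtext('name', '')
        if name.startswith('TAG '):
            tagname = name[4:].strip()
        elif tagname is not None:
            tags[tagname] = elt.get('hash')
            tagname = None
    print(tags)  # {'1.0': 'p1.gz'}: the tag points at the patch below it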
@@ -1,416 +1,416 b''
1 # git.py - git support for the convert extension
1 # git.py - git support for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 import subprocess
9 import subprocess
10 from mercurial import util, config, error
10 from mercurial import util, config, error
11 from mercurial.node import hex, nullid
11 from mercurial.node import hex, nullid
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14 from common import NoRepo, commit, converter_source, checktool
14 from common import NoRepo, commit, converter_source, checktool
15
15
16 class submodule(object):
16 class submodule(object):
17 def __init__(self, path, node, url):
17 def __init__(self, path, node, url):
18 self.path = path
18 self.path = path
19 self.node = node
19 self.node = node
20 self.url = url
20 self.url = url
21
21
22 def hgsub(self):
22 def hgsub(self):
23 return "%s = [git]%s" % (self.path, self.url)
23 return "%s = [git]%s" % (self.path, self.url)
24
24
25 def hgsubstate(self):
25 def hgsubstate(self):
26 return "%s %s" % (self.node, self.path)
26 return "%s %s" % (self.node, self.path)
27
27
28 class convert_git(converter_source):
28 class convert_git(converter_source):
29 # Windows does not support GIT_DIR= construct while other systems
29 # Windows does not support GIT_DIR= construct while other systems
30 # cannot remove environment variable. Just assume none have
30 # cannot remove environment variable. Just assume none have
31 # both issues.
31 # both issues.
32 if util.safehasattr(os, 'unsetenv'):
32 if util.safehasattr(os, 'unsetenv'):
33 def gitopen(self, s, err=None):
33 def gitopen(self, s, err=None):
34 prevgitdir = os.environ.get('GIT_DIR')
34 prevgitdir = os.environ.get('GIT_DIR')
35 os.environ['GIT_DIR'] = self.path
35 os.environ['GIT_DIR'] = self.path
36 try:
36 try:
37 if err == subprocess.PIPE:
37 if err == subprocess.PIPE:
38 (stdin, stdout, stderr) = util.popen3(s)
38 (stdin, stdout, stderr) = util.popen3(s)
39 return stdout
39 return stdout
40 elif err == subprocess.STDOUT:
40 elif err == subprocess.STDOUT:
41 return self.popen_with_stderr(s)
41 return self.popen_with_stderr(s)
42 else:
42 else:
43 return util.popen(s, 'rb')
43 return util.popen(s, 'rb')
44 finally:
44 finally:
45 if prevgitdir is None:
45 if prevgitdir is None:
46 del os.environ['GIT_DIR']
46 del os.environ['GIT_DIR']
47 else:
47 else:
48 os.environ['GIT_DIR'] = prevgitdir
48 os.environ['GIT_DIR'] = prevgitdir
49
49
50 def gitpipe(self, s):
50 def gitpipe(self, s):
51 prevgitdir = os.environ.get('GIT_DIR')
51 prevgitdir = os.environ.get('GIT_DIR')
52 os.environ['GIT_DIR'] = self.path
52 os.environ['GIT_DIR'] = self.path
53 try:
53 try:
54 return util.popen3(s)
54 return util.popen3(s)
55 finally:
55 finally:
56 if prevgitdir is None:
56 if prevgitdir is None:
57 del os.environ['GIT_DIR']
57 del os.environ['GIT_DIR']
58 else:
58 else:
59 os.environ['GIT_DIR'] = prevgitdir
59 os.environ['GIT_DIR'] = prevgitdir
60
60
61 else:
61 else:
62 def gitopen(self, s, err=None):
62 def gitopen(self, s, err=None):
63 if err == subprocess.PIPE:
63 if err == subprocess.PIPE:
64 (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
64 (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
65 return so
65 return so
66 elif err == subprocess.STDOUT:
66 elif err == subprocess.STDOUT:
67 return self.popen_with_stderr(s)
67 return self.popen_with_stderr(s)
68 else:
68 else:
69 return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
69 return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
70
70
71 def gitpipe(self, s):
71 def gitpipe(self, s):
72 return util.popen3('GIT_DIR=%s %s' % (self.path, s))
72 return util.popen3('GIT_DIR=%s %s' % (self.path, s))
73
73
74 def popen_with_stderr(self, s):
74 def popen_with_stderr(self, s):
75 p = subprocess.Popen(s, shell=True, bufsize=-1,
75 p = subprocess.Popen(s, shell=True, bufsize=-1,
76 close_fds=util.closefds,
76 close_fds=util.closefds,
77 stdin=subprocess.PIPE,
77 stdin=subprocess.PIPE,
78 stdout=subprocess.PIPE,
78 stdout=subprocess.PIPE,
79 stderr=subprocess.STDOUT,
79 stderr=subprocess.STDOUT,
80 universal_newlines=False,
80 universal_newlines=False,
81 env=None)
81 env=None)
82 return p.stdout
82 return p.stdout
83
83
84 def gitread(self, s):
84 def gitread(self, s):
85 fh = self.gitopen(s)
85 fh = self.gitopen(s)
86 data = fh.read()
86 data = fh.read()
87 return data, fh.close()
87 return data, fh.close()
88
88
89 def __init__(self, ui, path, revs=None):
89 def __init__(self, ui, path, revs=None):
90 super(convert_git, self).__init__(ui, path, revs=revs)
90 super(convert_git, self).__init__(ui, path, revs=revs)
91
91
92 if os.path.isdir(path + "/.git"):
92 if os.path.isdir(path + "/.git"):
93 path += "/.git"
93 path += "/.git"
94 if not os.path.exists(path + "/objects"):
94 if not os.path.exists(path + "/objects"):
95 raise NoRepo(_("%s does not look like a Git repository") % path)
95 raise NoRepo(_("%s does not look like a Git repository") % path)
96
96
97 # The default value (50) is based on the default for 'git diff'.
97 # The default value (50) is based on the default for 'git diff'.
98 similarity = ui.configint('convert', 'git.similarity', default=50)
98 similarity = ui.configint('convert', 'git.similarity', default=50)
99 if similarity < 0 or similarity > 100:
99 if similarity < 0 or similarity > 100:
100 raise error.Abort(_('similarity must be between 0 and 100'))
100 raise error.Abort(_('similarity must be between 0 and 100'))
101 if similarity > 0:
101 if similarity > 0:
102 self.simopt = '-C%d%%' % similarity
102 self.simopt = '-C%d%%' % similarity
103 findcopiesharder = ui.configbool('convert', 'git.findcopiesharder',
103 findcopiesharder = ui.configbool('convert', 'git.findcopiesharder',
104 False)
104 False)
105 if findcopiesharder:
105 if findcopiesharder:
106 self.simopt += ' --find-copies-harder'
106 self.simopt += ' --find-copies-harder'
107 else:
107 else:
108 self.simopt = ''
108 self.simopt = ''
109
109
110 checktool('git', 'git')
110 checktool('git', 'git')
111
111
112 self.path = path
112 self.path = path
113 self.submodules = []
113 self.submodules = []
114
114
115 self.catfilepipe = self.gitpipe('git cat-file --batch')
115 self.catfilepipe = self.gitpipe('git cat-file --batch')
116
116
117 def after(self):
117 def after(self):
118 for f in self.catfilepipe:
118 for f in self.catfilepipe:
119 f.close()
119 f.close()
120
120
121 def getheads(self):
121 def getheads(self):
122 if not self.revs:
122 if not self.revs:
123 heads, ret = self.gitread('git rev-parse --branches --remotes')
123 heads, ret = self.gitread('git rev-parse --branches --remotes')
124 heads = heads.splitlines()
124 heads = heads.splitlines()
125 if ret:
125 if ret:
126 raise error.Abort(_('cannot retrieve git heads'))
126 raise error.Abort(_('cannot retrieve git heads'))
127 else:
127 else:
128 heads = []
128 heads = []
129 for rev in self.revs:
129 for rev in self.revs:
130 rawhead, ret = self.gitread("git rev-parse --verify %s" % rev)
130 rawhead, ret = self.gitread("git rev-parse --verify %s" % rev)
131 heads.append(rawhead[:-1])
131 heads.append(rawhead[:-1])
132 if ret:
132 if ret:
133 raise error.Abort(_('cannot retrieve git head "%s"') % rev)
133 raise error.Abort(_('cannot retrieve git head "%s"') % rev)
134 return heads
134 return heads
135
135
136 def catfile(self, rev, type):
136 def catfile(self, rev, type):
137 if rev == hex(nullid):
137 if rev == hex(nullid):
138 raise IOError
138 raise IOError
139 self.catfilepipe[0].write(rev+'\n')
139 self.catfilepipe[0].write(rev+'\n')
140 self.catfilepipe[0].flush()
140 self.catfilepipe[0].flush()
141 info = self.catfilepipe[1].readline().split()
141 info = self.catfilepipe[1].readline().split()
142 if info[1] != type:
142 if info[1] != type:
143 raise error.Abort(_('cannot read %r object at %s') % (type, rev))
143 raise error.Abort(_('cannot read %r object at %s') % (type, rev))
144 size = int(info[2])
144 size = int(info[2])
145 data = self.catfilepipe[1].read(size)
145 data = self.catfilepipe[1].read(size)
146 if len(data) < size:
146 if len(data) < size:
147 raise error.Abort(_('cannot read %r object at %s: unexpected size')
147 raise error.Abort(_('cannot read %r object at %s: unexpected size')
148 % (type, rev))
148 % (type, rev))
149 # read the trailing newline
149 # read the trailing newline
150 self.catfilepipe[1].read(1)
150 self.catfilepipe[1].read(1)
151 return data
151 return data
152
152
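# Stand-alone sketch of the 'git cat-file --batch' framing that
# catfile() drives through self.catfilepipe: send one object id per
# input line; the reply is a '<sha> <type> <size>' header line, then
# <size> bytes of content, then one trailing newline. Assumes 'git' is
# on PATH and that 'repo' names an existing repository (example
# arguments, not values from this module).
import subprocess

def catblob(repo, sha):
    p = subprocess.Popen(['git', 'cat-file', '--batch'], cwd=repo,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    p.stdin.write(sha + '\n')
    p.stdin.flush()
    header = p.stdout.readline().split()   # e.g. [sha, 'blob', '1234']
    data = p.stdout.read(int(header[2]))
    p.stdout.read(1)                        # eat the trailing newline
    p.stdin.close()
    p.wait()
    return data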
153 def getfile(self, name, rev):
153 def getfile(self, name, rev):
154 if rev == hex(nullid):
154 if rev == hex(nullid):
155 return None, None
155 return None, None
156 if name == '.hgsub':
156 if name == '.hgsub':
157 data = '\n'.join([m.hgsub() for m in self.submoditer()])
157 data = '\n'.join([m.hgsub() for m in self.submoditer()])
158 mode = ''
158 mode = ''
159 elif name == '.hgsubstate':
159 elif name == '.hgsubstate':
160 data = '\n'.join([m.hgsubstate() for m in self.submoditer()])
160 data = '\n'.join([m.hgsubstate() for m in self.submoditer()])
161 mode = ''
161 mode = ''
162 else:
162 else:
163 data = self.catfile(rev, "blob")
163 data = self.catfile(rev, "blob")
164 mode = self.modecache[(name, rev)]
164 mode = self.modecache[(name, rev)]
165 return data, mode
165 return data, mode
166
166
167 def submoditer(self):
167 def submoditer(self):
168 null = hex(nullid)
168 null = hex(nullid)
169 for m in sorted(self.submodules, key=lambda p: p.path):
169 for m in sorted(self.submodules, key=lambda p: p.path):
170 if m.node != null:
170 if m.node != null:
171 yield m
171 yield m
172
172
173 def parsegitmodules(self, content):
173 def parsegitmodules(self, content):
174 """Parse the formatted .gitmodules file, example file format:
174 """Parse the formatted .gitmodules file, example file format:
175 [submodule "sub"]\n
175 [submodule "sub"]\n
176 \tpath = sub\n
176 \tpath = sub\n
177 \turl = git://giturl\n
177 \turl = git://giturl\n
178 """
178 """
179 self.submodules = []
179 self.submodules = []
180 c = config.config()
180 c = config.config()
181 # Each item in .gitmodules starts with whitespace that can't be parsed
181 # Each item in .gitmodules starts with whitespace that can't be parsed
182 c.parse('.gitmodules', '\n'.join(line.strip() for line in
182 c.parse('.gitmodules', '\n'.join(line.strip() for line in
183 content.split('\n')))
183 content.split('\n')))
184 for sec in c.sections():
184 for sec in c.sections():
185 s = c[sec]
185 s = c[sec]
186 if 'url' in s and 'path' in s:
186 if 'url' in s and 'path' in s:
187 self.submodules.append(submodule(s['path'], '', s['url']))
187 self.submodules.append(submodule(s['path'], '', s['url']))
188
188
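# Worked example (invented input) of the whitespace stripping done in
# parsegitmodules() above: git indents the submodule keys, so each
# line is strip()ed before the ini-style parse.
sample = '[submodule "sub"]\n\tpath = sub\n\turl = git://giturl\n'
stripped = '\n'.join(line.strip() for line in sample.split('\n'))
assert stripped == '[submodule "sub"]\npath = sub\nurl = git://giturl\n'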
189 def retrievegitmodules(self, version):
189 def retrievegitmodules(self, version):
190 modules, ret = self.gitread("git show %s:%s" % (version, '.gitmodules'))
190 modules, ret = self.gitread("git show %s:%s" % (version, '.gitmodules'))
191 if ret:
191 if ret:
192 # This can happen if the repo contains a file with mode 160000 (a
192 # This can happen if the repo contains a file with mode 160000 (a
193 # submodule gitlink) but no .gitmodules file.
193 # submodule gitlink) but no .gitmodules file.
194 self.ui.warn(_("warning: cannot read submodules config file in "
194 self.ui.warn(_("warning: cannot read submodules config file in "
195 "%s\n") % version)
195 "%s\n") % version)
196 return
196 return
197
197
198 try:
198 try:
199 self.parsegitmodules(modules)
199 self.parsegitmodules(modules)
200 except error.ParseError:
200 except error.ParseError:
201 self.ui.warn(_("warning: unable to parse .gitmodules in %s\n")
201 self.ui.warn(_("warning: unable to parse .gitmodules in %s\n")
202 % version)
202 % version)
203 return
203 return
204
204
205 for m in self.submodules:
205 for m in self.submodules:
206 node, ret = self.gitread("git rev-parse %s:%s" % (version, m.path))
206 node, ret = self.gitread("git rev-parse %s:%s" % (version, m.path))
207 if ret:
207 if ret:
208 continue
208 continue
209 m.node = node.strip()
209 m.node = node.strip()
210
210
211 def getchanges(self, version, full):
211 def getchanges(self, version, full):
212 if full:
212 if full:
213 raise error.Abort(_("convert from git do not support --full"))
213 raise error.Abort(_("convert from git does not support --full"))
214 self.modecache = {}
214 self.modecache = {}
215 fh = self.gitopen("git diff-tree -z --root -m -r %s %s" % (
215 fh = self.gitopen("git diff-tree -z --root -m -r %s %s" % (
216 self.simopt, version))
216 self.simopt, version))
217 changes = []
217 changes = []
218 copies = {}
218 copies = {}
219 seen = set()
219 seen = set()
220 entry = None
220 entry = None
221 subexists = [False]
221 subexists = [False]
222 subdeleted = [False]
222 subdeleted = [False]
223 difftree = fh.read().split('\x00')
223 difftree = fh.read().split('\x00')
224 lcount = len(difftree)
224 lcount = len(difftree)
225 i = 0
225 i = 0
226
226
227 skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules',
227 skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules',
228 False)
228 False)
229 def add(entry, f, isdest):
229 def add(entry, f, isdest):
230 seen.add(f)
230 seen.add(f)
231 h = entry[3]
231 h = entry[3]
232 p = (entry[1] == "100755")
232 p = (entry[1] == "100755")
233 s = (entry[1] == "120000")
233 s = (entry[1] == "120000")
234 renamesource = (not isdest and entry[4][0] == 'R')
234 renamesource = (not isdest and entry[4][0] == 'R')
235
235
236 if f == '.gitmodules':
236 if f == '.gitmodules':
237 if skipsubmodules:
237 if skipsubmodules:
238 return
238 return
239
239
240 subexists[0] = True
240 subexists[0] = True
241 if entry[4] == 'D' or renamesource:
241 if entry[4] == 'D' or renamesource:
242 subdeleted[0] = True
242 subdeleted[0] = True
243 changes.append(('.hgsub', hex(nullid)))
243 changes.append(('.hgsub', hex(nullid)))
244 else:
244 else:
245 changes.append(('.hgsub', ''))
245 changes.append(('.hgsub', ''))
246 elif entry[1] == '160000' or entry[0] == ':160000':
246 elif entry[1] == '160000' or entry[0] == ':160000':
247 if not skipsubmodules:
247 if not skipsubmodules:
248 subexists[0] = True
248 subexists[0] = True
249 else:
249 else:
250 if renamesource:
250 if renamesource:
251 h = hex(nullid)
251 h = hex(nullid)
252 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
252 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
253 changes.append((f, h))
253 changes.append((f, h))
254
254
255 while i < lcount:
255 while i < lcount:
256 l = difftree[i]
256 l = difftree[i]
257 i += 1
257 i += 1
258 if not entry:
258 if not entry:
259 if not l.startswith(':'):
259 if not l.startswith(':'):
260 continue
260 continue
261 entry = l.split()
261 entry = l.split()
262 continue
262 continue
263 f = l
263 f = l
264 if entry[4][0] == 'C':
264 if entry[4][0] == 'C':
265 copysrc = f
265 copysrc = f
266 copydest = difftree[i]
266 copydest = difftree[i]
267 i += 1
267 i += 1
268 f = copydest
268 f = copydest
269 copies[copydest] = copysrc
269 copies[copydest] = copysrc
270 if f not in seen:
270 if f not in seen:
271 add(entry, f, False)
271 add(entry, f, False)
272 # A file can be copied multiple times, or modified and copied
272 # A file can be copied multiple times, or modified and copied
273 # simultaneously. So f can be repeated even if fdest isn't.
273 # simultaneously. So f can be repeated even if fdest isn't.
274 if entry[4][0] == 'R':
274 if entry[4][0] == 'R':
275 # rename: next line is the destination
275 # rename: next line is the destination
276 fdest = difftree[i]
276 fdest = difftree[i]
277 i += 1
277 i += 1
278 if fdest not in seen:
278 if fdest not in seen:
279 add(entry, fdest, True)
279 add(entry, fdest, True)
280 # .gitmodules isn't imported at all, so copies to or from it
280 # .gitmodules isn't imported at all, so copies to or from it
281 # don't really make sense
281 # don't really make sense
282 if f != '.gitmodules' and fdest != '.gitmodules':
282 if f != '.gitmodules' and fdest != '.gitmodules':
283 copies[fdest] = f
283 copies[fdest] = f
284 entry = None
284 entry = None
285 if fh.close():
285 if fh.close():
286 raise error.Abort(_('cannot read changes in %s') % version)
286 raise error.Abort(_('cannot read changes in %s') % version)
287
287
288 if subexists[0]:
288 if subexists[0]:
289 if subdeleted[0]:
289 if subdeleted[0]:
290 changes.append(('.hgsubstate', hex(nullid)))
290 changes.append(('.hgsubstate', hex(nullid)))
291 else:
291 else:
292 self.retrievegitmodules(version)
292 self.retrievegitmodules(version)
293 changes.append(('.hgsubstate', ''))
293 changes.append(('.hgsubstate', ''))
294 return (changes, copies, set())
294 return (changes, copies, set())
295
295
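# Hypothetical (and abbreviated: real object ids are 40 hex digits)
# 'git diff-tree -z' output, showing the alternation parsed above: a
# ':'-prefixed metadata entry, then the path - or two paths for copies
# ('C') and renames ('R').
raw = (':100644 100755 abc123 def456 M\x00foo.py\x00'
       ':100644 100644 abc123 abc123 R100\x00old.txt\x00new.txt\x00')
fields = raw.split('\x00')
entry = fields[0].split()
assert entry[1] == '100755' and entry[4] == 'M' and fields[1] == 'foo.py'
entry = fields[2].split()
assert entry[4][0] == 'R'                     # rename, similarity 100
assert (fields[3], fields[4]) == ('old.txt', 'new.txt')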
296 def getcommit(self, version):
296 def getcommit(self, version):
297 c = self.catfile(version, "commit") # read the commit hash
297 c = self.catfile(version, "commit") # read the commit hash
298 end = c.find("\n\n")
298 end = c.find("\n\n")
299 message = c[end + 2:]
299 message = c[end + 2:]
300 message = self.recode(message)
300 message = self.recode(message)
301 l = c[:end].splitlines()
301 l = c[:end].splitlines()
302 parents = []
302 parents = []
303 author = committer = None
303 author = committer = None
304 for e in l[1:]:
304 for e in l[1:]:
305 n, v = e.split(" ", 1)
305 n, v = e.split(" ", 1)
306 if n == "author":
306 if n == "author":
307 p = v.split()
307 p = v.split()
308 tm, tz = p[-2:]
308 tm, tz = p[-2:]
309 author = " ".join(p[:-2])
309 author = " ".join(p[:-2])
310 if author[0] == "<": author = author[1:-1]
310 if author[0] == "<": author = author[1:-1]
311 author = self.recode(author)
311 author = self.recode(author)
312 if n == "committer":
312 if n == "committer":
313 p = v.split()
313 p = v.split()
314 tm, tz = p[-2:]
314 tm, tz = p[-2:]
315 committer = " ".join(p[:-2])
315 committer = " ".join(p[:-2])
316 if committer[0] == "<": committer = committer[1:-1]
316 if committer[0] == "<": committer = committer[1:-1]
317 committer = self.recode(committer)
317 committer = self.recode(committer)
318 if n == "parent":
318 if n == "parent":
319 parents.append(v)
319 parents.append(v)
320
320
321 if committer and committer != author:
321 if committer and committer != author:
322 message += "\ncommitter: %s\n" % committer
322 message += "\ncommitter: %s\n" % committer
323 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
323 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
324 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm) * 60)
324 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm) * 60)
325 date = tm + " " + str(tz)
325 date = tm + " " + str(tz)
326
326
327 c = commit(parents=parents, date=date, author=author, desc=message,
327 c = commit(parents=parents, date=date, author=author, desc=message,
328 rev=version)
328 rev=version)
329 return c
329 return c
330
330
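# Worked example of the offset conversion in getcommit() above, redone
# stand-alone: git writes the zone as e.g. '+0530', while Mercurial
# stores seconds west of UTC, hence the sign flip (and minutes count
# for 60 seconds each).
tz = '+0530'
tzs, tzh, tzm = tz[-5:-4] + '1', tz[-4:-2], tz[-2:]
assert (tzs, tzh, tzm) == ('+1', '05', '30')
assert -int(tzs) * (int(tzh) * 3600 + int(tzm) * 60) == -19800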
331 def numcommits(self):
331 def numcommits(self):
332 return len([None for _ in self.gitopen('git rev-list --all')])
332 return len([None for _ in self.gitopen('git rev-list --all')])
333
333
334 def gettags(self):
334 def gettags(self):
335 tags = {}
335 tags = {}
336 alltags = {}
336 alltags = {}
337 fh = self.gitopen('git ls-remote --tags "%s"' % self.path,
337 fh = self.gitopen('git ls-remote --tags "%s"' % self.path,
338 err=subprocess.STDOUT)
338 err=subprocess.STDOUT)
339 prefix = 'refs/tags/'
339 prefix = 'refs/tags/'
340
340
341 # Build complete list of tags, both annotated and bare ones
341 # Build complete list of tags, both annotated and bare ones
342 for line in fh:
342 for line in fh:
343 line = line.strip()
343 line = line.strip()
344 if line.startswith("error:") or line.startswith("fatal:"):
344 if line.startswith("error:") or line.startswith("fatal:"):
345 raise error.Abort(_('cannot read tags from %s') % self.path)
345 raise error.Abort(_('cannot read tags from %s') % self.path)
346 node, tag = line.split(None, 1)
346 node, tag = line.split(None, 1)
347 if not tag.startswith(prefix):
347 if not tag.startswith(prefix):
348 continue
348 continue
349 alltags[tag[len(prefix):]] = node
349 alltags[tag[len(prefix):]] = node
350 if fh.close():
350 if fh.close():
351 raise error.Abort(_('cannot read tags from %s') % self.path)
351 raise error.Abort(_('cannot read tags from %s') % self.path)
352
352
353 # Filter out tag objects for annotated tag refs
353 # Filter out tag objects for annotated tag refs
354 for tag in alltags:
354 for tag in alltags:
355 if tag.endswith('^{}'):
355 if tag.endswith('^{}'):
356 tags[tag[:-3]] = alltags[tag]
356 tags[tag[:-3]] = alltags[tag]
357 else:
357 else:
358 if tag + '^{}' in alltags:
358 if tag + '^{}' in alltags:
359 continue
359 continue
360 else:
360 else:
361 tags[tag] = alltags[tag]
361 tags[tag] = alltags[tag]
362
362
363 return tags
363 return tags
364
364
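# Invented 'git ls-remote --tags' data showing why gettags() prefers
# the peeled '^{}' entries: for an annotated tag the plain ref names
# the tag object itself, while 'name^{}' names the tagged commit.
alltags = {
    'v1.0': '1' * 40,        # annotated: points at the tag object
    'v1.0^{}': '2' * 40,     # peeled: points at the commit
    'v1.1': '3' * 40,        # lightweight: no peeled twin
}
tags = {}
for tag in alltags:
    if tag.endswith('^{}'):
        tags[tag[:-3]] = alltags[tag]
    elif tag + '^{}' not in alltags:
        tags[tag] = alltags[tag]
assert tags == {'v1.0': '2' * 40, 'v1.1': '3' * 40}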
365 def getchangedfiles(self, version, i):
365 def getchangedfiles(self, version, i):
366 changes = []
366 changes = []
367 if i is None:
367 if i is None:
368 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
368 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
369 for l in fh:
369 for l in fh:
370 if "\t" not in l:
370 if "\t" not in l:
371 continue
371 continue
372 m, f = l[:-1].split("\t")
372 m, f = l[:-1].split("\t")
373 changes.append(f)
373 changes.append(f)
374 else:
374 else:
375 fh = self.gitopen('git diff-tree --name-only --root -r %s '
375 fh = self.gitopen('git diff-tree --name-only --root -r %s '
376 '"%s^%s" --' % (version, version, i + 1))
376 '"%s^%s" --' % (version, version, i + 1))
377 changes = [f.rstrip('\n') for f in fh]
377 changes = [f.rstrip('\n') for f in fh]
378 if fh.close():
378 if fh.close():
379 raise error.Abort(_('cannot read changes in %s') % version)
379 raise error.Abort(_('cannot read changes in %s') % version)
380
380
381 return changes
381 return changes
382
382
383 def getbookmarks(self):
383 def getbookmarks(self):
384 bookmarks = {}
384 bookmarks = {}
385
385
386 # Handle local and remote branches
386 # Handle local and remote branches
387 remoteprefix = self.ui.config('convert', 'git.remoteprefix', 'remote')
387 remoteprefix = self.ui.config('convert', 'git.remoteprefix', 'remote')
388 reftypes = [
388 reftypes = [
389 # (git prefix, hg prefix)
389 # (git prefix, hg prefix)
390 ('refs/remotes/origin/', remoteprefix + '/'),
390 ('refs/remotes/origin/', remoteprefix + '/'),
391 ('refs/heads/', '')
391 ('refs/heads/', '')
392 ]
392 ]
393
393
394 exclude = set([
394 exclude = set([
395 'refs/remotes/origin/HEAD',
395 'refs/remotes/origin/HEAD',
396 ])
396 ])
397
397
398 try:
398 try:
399 fh = self.gitopen('git show-ref', err=subprocess.PIPE)
399 fh = self.gitopen('git show-ref', err=subprocess.PIPE)
400 for line in fh:
400 for line in fh:
401 line = line.strip()
401 line = line.strip()
402 rev, name = line.split(None, 1)
402 rev, name = line.split(None, 1)
403 # Process each type of branch
403 # Process each type of branch
404 for gitprefix, hgprefix in reftypes:
404 for gitprefix, hgprefix in reftypes:
405 if not name.startswith(gitprefix) or name in exclude:
405 if not name.startswith(gitprefix) or name in exclude:
406 continue
406 continue
407 name = '%s%s' % (hgprefix, name[len(gitprefix):])
407 name = '%s%s' % (hgprefix, name[len(gitprefix):])
408 bookmarks[name] = rev
408 bookmarks[name] = rev
409 except Exception:
409 except Exception:
410 pass
410 pass
411
411
412 return bookmarks
412 return bookmarks
413
413
414 def checkrevformat(self, revstr, mapname='splicemap'):
414 def checkrevformat(self, revstr, mapname='splicemap'):
415 """ git revision string is a 40 byte hex """
415 """ git revision string is a 40 byte hex """
416 self.checkhexformat(revstr, mapname)
416 self.checkhexformat(revstr, mapname)
@@ -1,342 +1,342 b''
1 # gnuarch.py - GNU Arch support for the convert extension
1 # gnuarch.py - GNU Arch support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 # and others
4 # and others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from common import NoRepo, commandline, commit, converter_source
9 from common import NoRepo, commandline, commit, converter_source
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import encoding, util, error
11 from mercurial import encoding, util, error
12 import os, shutil, tempfile, stat
12 import os, shutil, tempfile, stat
13 from email.Parser import Parser
13 from email.Parser import Parser
14
14
15 class gnuarch_source(converter_source, commandline):
15 class gnuarch_source(converter_source, commandline):
16
16
17 class gnuarch_rev(object):
17 class gnuarch_rev(object):
18 def __init__(self, rev):
18 def __init__(self, rev):
19 self.rev = rev
19 self.rev = rev
20 self.summary = ''
20 self.summary = ''
21 self.date = None
21 self.date = None
22 self.author = ''
22 self.author = ''
23 self.continuationof = None
23 self.continuationof = None
24 self.add_files = []
24 self.add_files = []
25 self.mod_files = []
25 self.mod_files = []
26 self.del_files = []
26 self.del_files = []
27 self.ren_files = {}
27 self.ren_files = {}
28 self.ren_dirs = {}
28 self.ren_dirs = {}
29
29
30 def __init__(self, ui, path, revs=None):
30 def __init__(self, ui, path, revs=None):
31 super(gnuarch_source, self).__init__(ui, path, revs=revs)
31 super(gnuarch_source, self).__init__(ui, path, revs=revs)
32
32
33 if not os.path.exists(os.path.join(path, '{arch}')):
33 if not os.path.exists(os.path.join(path, '{arch}')):
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
35 % path)
35 % path)
36
36
37 # Could use checktool, but we want to check for baz or tla.
37 # Could use checktool, but we want to check for baz or tla.
38 self.execmd = None
38 self.execmd = None
39 if util.findexe('baz'):
39 if util.findexe('baz'):
40 self.execmd = 'baz'
40 self.execmd = 'baz'
41 else:
41 else:
42 if util.findexe('tla'):
42 if util.findexe('tla'):
43 self.execmd = 'tla'
43 self.execmd = 'tla'
44 else:
44 else:
45 raise error.Abort(_('cannot find a GNU Arch tool'))
45 raise error.Abort(_('cannot find a GNU Arch tool'))
46
46
47 commandline.__init__(self, ui, self.execmd)
47 commandline.__init__(self, ui, self.execmd)
48
48
49 self.path = os.path.realpath(path)
49 self.path = os.path.realpath(path)
50 self.tmppath = None
50 self.tmppath = None
51
51
52 self.treeversion = None
52 self.treeversion = None
53 self.lastrev = None
53 self.lastrev = None
54 self.changes = {}
54 self.changes = {}
55 self.parents = {}
55 self.parents = {}
56 self.tags = {}
56 self.tags = {}
57 self.catlogparser = Parser()
57 self.catlogparser = Parser()
58 self.encoding = encoding.encoding
58 self.encoding = encoding.encoding
59 self.archives = []
59 self.archives = []
60
60
61 def before(self):
61 def before(self):
62 # Get registered archives
62 # Get registered archives
63 self.archives = [i.rstrip('\n')
63 self.archives = [i.rstrip('\n')
64 for i in self.runlines0('archives', '-n')]
64 for i in self.runlines0('archives', '-n')]
65
65
66 if self.execmd == 'tla':
66 if self.execmd == 'tla':
67 output = self.run0('tree-version', self.path)
67 output = self.run0('tree-version', self.path)
68 else:
68 else:
69 output = self.run0('tree-version', '-d', self.path)
69 output = self.run0('tree-version', '-d', self.path)
70 self.treeversion = output.strip()
70 self.treeversion = output.strip()
71
71
72 # Get name of temporary directory
72 # Get name of temporary directory
73 version = self.treeversion.split('/')
73 version = self.treeversion.split('/')
74 self.tmppath = os.path.join(tempfile.gettempdir(),
74 self.tmppath = os.path.join(tempfile.gettempdir(),
75 'hg-%s' % version[1])
75 'hg-%s' % version[1])
76
76
77 # Generate parents dictionary
77 # Generate parents dictionary
78 self.parents[None] = []
78 self.parents[None] = []
79 treeversion = self.treeversion
79 treeversion = self.treeversion
80 child = None
80 child = None
81 while treeversion:
81 while treeversion:
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
83
83
84 archive = treeversion.split('/')[0]
84 archive = treeversion.split('/')[0]
85 if archive not in self.archives:
85 if archive not in self.archives:
86 self.ui.status(_('tree analysis stopped because it points to '
86 self.ui.status(_('tree analysis stopped because it points to '
87 'an unregistered archive %s...\n') % archive)
87 'an unregistered archive %s...\n') % archive)
88 break
88 break
89
89
90 # Get the complete list of revisions for that tree version
90 # Get the complete list of revisions for that tree version
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
92 self.checkexit(status, 'failed retrieving revisions for %s'
92 self.checkexit(status, 'failed retrieving revisions for %s'
93 % treeversion)
93 % treeversion)
94
94
95 # No new iteration unless a revision has a continuation-of header
95 # No new iteration unless a revision has a continuation-of header
96 treeversion = None
96 treeversion = None
97
97
98 for l in output:
98 for l in output:
99 rev = l.strip()
99 rev = l.strip()
100 self.changes[rev] = self.gnuarch_rev(rev)
100 self.changes[rev] = self.gnuarch_rev(rev)
101 self.parents[rev] = []
101 self.parents[rev] = []
102
102
103 # Read author, date and summary
103 # Read author, date and summary
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
105 if status:
105 if status:
106 catlog = self.run0('cat-archive-log', rev)
106 catlog = self.run0('cat-archive-log', rev)
107 self._parsecatlog(catlog, rev)
107 self._parsecatlog(catlog, rev)
108
108
109 # Populate the parents map
109 # Populate the parents map
110 self.parents[child].append(rev)
110 self.parents[child].append(rev)
111
111
112 # Keep track of the current revision as the child of the next
112 # Keep track of the current revision as the child of the next
113 # revision scanned
113 # revision scanned
114 child = rev
114 child = rev
115
115
116 # Check if we have to follow the usual incremental history
116 # Check if we have to follow the usual incremental history
117 # or if we have to 'jump' to a different treeversion given
117 # or if we have to 'jump' to a different treeversion given
118 # by the continuation-of header.
118 # by the continuation-of header.
119 if self.changes[rev].continuationof:
119 if self.changes[rev].continuationof:
120 treeversion = '--'.join(
120 treeversion = '--'.join(
121 self.changes[rev].continuationof.split('--')[:-1])
121 self.changes[rev].continuationof.split('--')[:-1])
122 break
122 break
123
123
124 # If we reached a base-0 revision w/o any continuation-of
124 # If we reached a base-0 revision w/o any continuation-of
125 # header, it means the tree history ends here.
125 # header, it means the tree history ends here.
126 if rev[-6:] == 'base-0':
126 if rev[-6:] == 'base-0':
127 break
127 break
128
128
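# Sketch of the newest-to-oldest linking built by the loop above:
# every revision scanned becomes the parent of the one scanned just
# before it, so parents[None] ends up holding the branch head.
# Revision names are invented.
parents = {None: []}
child = None
for rev in ['patch-3', 'patch-2', 'patch-1', 'base-0']:
    parents[rev] = []
    parents[child].append(rev)
    child = rev
assert parents[None] == ['patch-3']      # the head, as getheads() returns
assert parents['patch-3'] == ['patch-2']
assert parents['base-0'] == []           # history ends at base-0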
129 def after(self):
129 def after(self):
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
132
132
133 def getheads(self):
133 def getheads(self):
134 return self.parents[None]
134 return self.parents[None]
135
135
136 def getfile(self, name, rev):
136 def getfile(self, name, rev):
137 if rev != self.lastrev:
137 if rev != self.lastrev:
138 raise error.Abort(_('internal calling inconsistency'))
138 raise error.Abort(_('internal calling inconsistency'))
139
139
140 if not os.path.lexists(os.path.join(self.tmppath, name)):
140 if not os.path.lexists(os.path.join(self.tmppath, name)):
141 return None, None
141 return None, None
142
142
143 return self._getfile(name, rev)
143 return self._getfile(name, rev)
144
144
145 def getchanges(self, rev, full):
145 def getchanges(self, rev, full):
146 if full:
146 if full:
147 raise error.Abort(_("convert from arch do not support --full"))
147 raise error.Abort(_("convert from arch does not support --full"))
148 self._update(rev)
148 self._update(rev)
149 changes = []
149 changes = []
150 copies = {}
150 copies = {}
151
151
152 for f in self.changes[rev].add_files:
152 for f in self.changes[rev].add_files:
153 changes.append((f, rev))
153 changes.append((f, rev))
154
154
155 for f in self.changes[rev].mod_files:
155 for f in self.changes[rev].mod_files:
156 changes.append((f, rev))
156 changes.append((f, rev))
157
157
158 for f in self.changes[rev].del_files:
158 for f in self.changes[rev].del_files:
159 changes.append((f, rev))
159 changes.append((f, rev))
160
160
161 for src in self.changes[rev].ren_files:
161 for src in self.changes[rev].ren_files:
162 to = self.changes[rev].ren_files[src]
162 to = self.changes[rev].ren_files[src]
163 changes.append((src, rev))
163 changes.append((src, rev))
164 changes.append((to, rev))
164 changes.append((to, rev))
165 copies[to] = src
165 copies[to] = src
166
166
167 for src in self.changes[rev].ren_dirs:
167 for src in self.changes[rev].ren_dirs:
168 to = self.changes[rev].ren_dirs[src]
168 to = self.changes[rev].ren_dirs[src]
169 chgs, cps = self._rendirchanges(src, to)
169 chgs, cps = self._rendirchanges(src, to)
170 changes += [(f, rev) for f in chgs]
170 changes += [(f, rev) for f in chgs]
171 copies.update(cps)
171 copies.update(cps)
172
172
173 self.lastrev = rev
173 self.lastrev = rev
174 return sorted(set(changes)), copies, set()
174 return sorted(set(changes)), copies, set()
175
175
176 def getcommit(self, rev):
176 def getcommit(self, rev):
177 changes = self.changes[rev]
177 changes = self.changes[rev]
178 return commit(author=changes.author, date=changes.date,
178 return commit(author=changes.author, date=changes.date,
179 desc=changes.summary, parents=self.parents[rev], rev=rev)
179 desc=changes.summary, parents=self.parents[rev], rev=rev)
180
180
181 def gettags(self):
181 def gettags(self):
182 return self.tags
182 return self.tags
183
183
184 def _execute(self, cmd, *args, **kwargs):
184 def _execute(self, cmd, *args, **kwargs):
185 cmdline = [self.execmd, cmd]
185 cmdline = [self.execmd, cmd]
186 cmdline += args
186 cmdline += args
187 cmdline = [util.shellquote(arg) for arg in cmdline]
187 cmdline = [util.shellquote(arg) for arg in cmdline]
188 cmdline += ['>', os.devnull, '2>', os.devnull]
188 cmdline += ['>', os.devnull, '2>', os.devnull]
189 cmdline = util.quotecommand(' '.join(cmdline))
189 cmdline = util.quotecommand(' '.join(cmdline))
190 self.ui.debug(cmdline, '\n')
190 self.ui.debug(cmdline, '\n')
191 return os.system(cmdline)
191 return os.system(cmdline)
192
192
193 def _update(self, rev):
193 def _update(self, rev):
194 self.ui.debug('applying revision %s...\n' % rev)
194 self.ui.debug('applying revision %s...\n' % rev)
195 changeset, status = self.runlines('replay', '-d', self.tmppath,
195 changeset, status = self.runlines('replay', '-d', self.tmppath,
196 rev)
196 rev)
197 if status:
197 if status:
198 # Something went wrong while merging (a baz or tla
198 # Something went wrong while merging (a baz or tla
199 # issue?); fetch the full revision and continue from there
199 # issue?); fetch the full revision and continue from there
200 shutil.rmtree(self.tmppath, ignore_errors=True)
200 shutil.rmtree(self.tmppath, ignore_errors=True)
201 self._obtainrevision(rev)
201 self._obtainrevision(rev)
202 else:
202 else:
203 old_rev = self.parents[rev][0]
203 old_rev = self.parents[rev][0]
204 self.ui.debug('computing changeset between %s and %s...\n'
204 self.ui.debug('computing changeset between %s and %s...\n'
205 % (old_rev, rev))
205 % (old_rev, rev))
206 self._parsechangeset(changeset, rev)
206 self._parsechangeset(changeset, rev)
207
207
208 def _getfile(self, name, rev):
208 def _getfile(self, name, rev):
209 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
209 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
210 if stat.S_ISLNK(mode):
210 if stat.S_ISLNK(mode):
211 data = os.readlink(os.path.join(self.tmppath, name))
211 data = os.readlink(os.path.join(self.tmppath, name))
212 if mode:
212 if mode:
213 mode = 'l'
213 mode = 'l'
214 else:
214 else:
215 mode = ''
215 mode = ''
216 else:
216 else:
217 data = open(os.path.join(self.tmppath, name), 'rb').read()
217 data = open(os.path.join(self.tmppath, name), 'rb').read()
218 mode = (mode & 0o111) and 'x' or ''
218 mode = (mode & 0o111) and 'x' or ''
219 return data, mode
219 return data, mode
220
220
221 def _exclude(self, name):
221 def _exclude(self, name):
222 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
222 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
223 for exc in exclude:
223 for exc in exclude:
224 if name.find(exc) != -1:
224 if name.find(exc) != -1:
225 return True
225 return True
226 return False
226 return False
227
227
228 def _readcontents(self, path):
228 def _readcontents(self, path):
229 files = []
229 files = []
230 contents = os.listdir(path)
230 contents = os.listdir(path)
231 while len(contents) > 0:
231 while len(contents) > 0:
232 c = contents.pop()
232 c = contents.pop()
233 p = os.path.join(path, c)
233 p = os.path.join(path, c)
234 # os.walk could be used, but here we avoid internal GNU
234 # os.walk could be used, but here we avoid internal GNU
235 # Arch files and directories, thus saving a lot of time.
235 # Arch files and directories, thus saving a lot of time.
236 if not self._exclude(p):
236 if not self._exclude(p):
237 if os.path.isdir(p):
237 if os.path.isdir(p):
238 contents += [os.path.join(c, f) for f in os.listdir(p)]
238 contents += [os.path.join(c, f) for f in os.listdir(p)]
239 else:
239 else:
240 files.append(c)
240 files.append(c)
241 return files
241 return files
242
242
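# For contrast, a hypothetical os.walk-based equivalent of
# _readcontents(); the manual stack above gets the same effect by
# never pushing excluded entries in the first place.
import os

def readcontents(path, exclude=('{arch}', '.arch-ids', '.arch-inventory')):
    files = []
    for root, dirs, names in os.walk(path):
        dirs[:] = [d for d in dirs if d not in exclude]   # prune in place
        for n in names:
            if n not in exclude:
                files.append(os.path.relpath(os.path.join(root, n), path))
    return files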
243 def _rendirchanges(self, src, dest):
243 def _rendirchanges(self, src, dest):
244 changes = []
244 changes = []
245 copies = {}
245 copies = {}
246 files = self._readcontents(os.path.join(self.tmppath, dest))
246 files = self._readcontents(os.path.join(self.tmppath, dest))
247 for f in files:
247 for f in files:
248 s = os.path.join(src, f)
248 s = os.path.join(src, f)
249 d = os.path.join(dest, f)
249 d = os.path.join(dest, f)
250 changes.append(s)
250 changes.append(s)
251 changes.append(d)
251 changes.append(d)
252 copies[d] = s
252 copies[d] = s
253 return changes, copies
253 return changes, copies
254
254
255 def _obtainrevision(self, rev):
255 def _obtainrevision(self, rev):
256 self.ui.debug('obtaining revision %s...\n' % rev)
256 self.ui.debug('obtaining revision %s...\n' % rev)
257 output = self._execute('get', rev, self.tmppath)
257 output = self._execute('get', rev, self.tmppath)
258 self.checkexit(output)
258 self.checkexit(output)
259 self.ui.debug('analyzing revision %s...\n' % rev)
259 self.ui.debug('analyzing revision %s...\n' % rev)
260 files = self._readcontents(self.tmppath)
260 files = self._readcontents(self.tmppath)
261 self.changes[rev].add_files += files
261 self.changes[rev].add_files += files
262
262
263 def _stripbasepath(self, path):
263 def _stripbasepath(self, path):
264 if path.startswith('./'):
264 if path.startswith('./'):
265 return path[2:]
265 return path[2:]
266 return path
266 return path
267
267
268 def _parsecatlog(self, data, rev):
268 def _parsecatlog(self, data, rev):
269 try:
269 try:
270 catlog = self.catlogparser.parsestr(data)
270 catlog = self.catlogparser.parsestr(data)
271
271
272 # Commit date
272 # Commit date
273 self.changes[rev].date = util.datestr(
273 self.changes[rev].date = util.datestr(
274 util.strdate(catlog['Standard-date'],
274 util.strdate(catlog['Standard-date'],
275 '%Y-%m-%d %H:%M:%S'))
275 '%Y-%m-%d %H:%M:%S'))
276
276
277 # Commit author
277 # Commit author
278 self.changes[rev].author = self.recode(catlog['Creator'])
278 self.changes[rev].author = self.recode(catlog['Creator'])
279
279
280 # Commit description
280 # Commit description
281 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
281 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
282 catlog.get_payload()))
282 catlog.get_payload()))
283 self.changes[rev].summary = self.recode(self.changes[rev].summary)
283 self.changes[rev].summary = self.recode(self.changes[rev].summary)
284
284
285 # Commit revision origin when dealing with a branch or tag
285 # Commit revision origin when dealing with a branch or tag
286 if 'Continuation-of' in catlog:
286 if 'Continuation-of' in catlog:
287 self.changes[rev].continuationof = self.recode(
287 self.changes[rev].continuationof = self.recode(
288 catlog['Continuation-of'])
288 catlog['Continuation-of'])
289 except Exception:
289 except Exception:
290 raise error.Abort(_('could not parse cat-log of %s') % rev)
290 raise error.Abort(_('could not parse cat-log of %s') % rev)
291
291
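# Stand-alone example of the cat-log parsing above; the log text is
# invented, but tla/baz cat-log output is RFC 2822-style headers plus
# a body, which is why email.Parser fits.
from email.Parser import Parser

catlog = Parser().parsestr(
    'Standard-date: 2008-02-11 17:41:51\n'
    'Creator: Aleix <aleix@example.org>\n'
    'Summary: initial import\n'
    '\n'
    'Longer description here.\n')
assert catlog['Creator'] == 'Aleix <aleix@example.org>'
assert catlog['Standard-date'] == '2008-02-11 17:41:51'
assert catlog.get_payload() == 'Longer description here.\n'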
292 def _parsechangeset(self, data, rev):
292 def _parsechangeset(self, data, rev):
293 for l in data:
293 for l in data:
294 l = l.strip()
294 l = l.strip()
295 # Added file (ignore added directory)
295 # Added file (ignore added directory)
296 if l.startswith('A') and not l.startswith('A/'):
296 if l.startswith('A') and not l.startswith('A/'):
297 file = self._stripbasepath(l[1:].strip())
297 file = self._stripbasepath(l[1:].strip())
298 if not self._exclude(file):
298 if not self._exclude(file):
299 self.changes[rev].add_files.append(file)
299 self.changes[rev].add_files.append(file)
300 # Deleted file (ignore deleted directory)
300 # Deleted file (ignore deleted directory)
301 elif l.startswith('D') and not l.startswith('D/'):
301 elif l.startswith('D') and not l.startswith('D/'):
302 file = self._stripbasepath(l[1:].strip())
302 file = self._stripbasepath(l[1:].strip())
303 if not self._exclude(file):
303 if not self._exclude(file):
304 self.changes[rev].del_files.append(file)
304 self.changes[rev].del_files.append(file)
305 # Modified binary file
305 # Modified binary file
306 elif l.startswith('Mb'):
306 elif l.startswith('Mb'):
307 file = self._stripbasepath(l[2:].strip())
307 file = self._stripbasepath(l[2:].strip())
308 if not self._exclude(file):
308 if not self._exclude(file):
309 self.changes[rev].mod_files.append(file)
309 self.changes[rev].mod_files.append(file)
310 # Modified link
310 # Modified link
311 elif l.startswith('M->'):
311 elif l.startswith('M->'):
312 file = self._stripbasepath(l[3:].strip())
312 file = self._stripbasepath(l[3:].strip())
313 if not self._exclude(file):
313 if not self._exclude(file):
314 self.changes[rev].mod_files.append(file)
314 self.changes[rev].mod_files.append(file)
315 # Modified file
315 # Modified file
316 elif l.startswith('M'):
316 elif l.startswith('M'):
317 file = self._stripbasepath(l[1:].strip())
317 file = self._stripbasepath(l[1:].strip())
318 if not self._exclude(file):
318 if not self._exclude(file):
319 self.changes[rev].mod_files.append(file)
319 self.changes[rev].mod_files.append(file)
320 # Renamed file (or link)
320 # Renamed file (or link)
321 elif l.startswith('=>'):
321 elif l.startswith('=>'):
322 files = l[2:].strip().split(' ')
322 files = l[2:].strip().split(' ')
323 if len(files) == 1:
323 if len(files) == 1:
324 files = l[2:].strip().split('\t')
324 files = l[2:].strip().split('\t')
325 src = self._stripbasepath(files[0])
325 src = self._stripbasepath(files[0])
326 dst = self._stripbasepath(files[1])
326 dst = self._stripbasepath(files[1])
327 if not self._exclude(src) and not self._exclude(dst):
327 if not self._exclude(src) and not self._exclude(dst):
328 self.changes[rev].ren_files[src] = dst
328 self.changes[rev].ren_files[src] = dst
329 # Conversion from file to link or from link to file (modified)
329 # Conversion from file to link or from link to file (modified)
330 elif l.startswith('ch'):
330 elif l.startswith('ch'):
331 file = self._stripbasepath(l[2:].strip())
331 file = self._stripbasepath(l[2:].strip())
332 if not self._exclude(file):
332 if not self._exclude(file):
333 self.changes[rev].mod_files.append(file)
333 self.changes[rev].mod_files.append(file)
334 # Renamed directory
334 # Renamed directory
335 elif l.startswith('/>'):
335 elif l.startswith('/>'):
336 dirs = l[2:].strip().split(' ')
336 dirs = l[2:].strip().split(' ')
337 if len(dirs) == 1:
337 if len(dirs) == 1:
338 dirs = l[2:].strip().split('\t')
338 dirs = l[2:].strip().split('\t')
339 src = self._stripbasepath(dirs[0])
339 src = self._stripbasepath(dirs[0])
340 dst = self._stripbasepath(dirs[1])
340 dst = self._stripbasepath(dirs[1])
341 if not self._exclude(src) and not self._exclude(dst):
341 if not self._exclude(src) and not self._exclude(dst):
342 self.changes[rev].ren_dirs[src] = dst
342 self.changes[rev].ren_dirs[src] = dst
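# Invented examples of the change codes handled above, as they appear
# in changeset/replay output:
#   A  new.c           added file        D  old.c     deleted file
#   M  main.c          modified file     Mb logo.png  modified binary
#   M-> link           modified symlink  ch file      file<->link change
#   => a.c<TAB>b.c     renamed file      /> d1<TAB>d2 renamed directory
l = '=> a.c\tb.c'
files = l[2:].strip().split(' ')
if len(files) == 1:                 # paths were tab-separated
    files = l[2:].strip().split('\t')
assert files == ['a.c', 'b.c']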
@@ -1,364 +1,365 b''
1 # monotone.py - monotone support for the convert extension
1 # monotone.py - monotone support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
4 # others
4 # others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import os, re
9 import os, re
10 from mercurial import util, error
10 from mercurial import util, error
11 from common import NoRepo, commit, converter_source, checktool
11 from common import NoRepo, commit, converter_source, checktool
12 from common import commandline
12 from common import commandline
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14
14
15 class monotone_source(converter_source, commandline):
15 class monotone_source(converter_source, commandline):
16 def __init__(self, ui, path=None, revs=None):
16 def __init__(self, ui, path=None, revs=None):
17 converter_source.__init__(self, ui, path, revs)
17 converter_source.__init__(self, ui, path, revs)
18 if revs and len(revs) > 1:
18 if revs and len(revs) > 1:
19 raise error.Abort(_('monotone source does not support specifying '
19 raise error.Abort(_('monotone source does not support specifying '
20 'multiple revs'))
20 'multiple revs'))
21 commandline.__init__(self, ui, 'mtn')
21 commandline.__init__(self, ui, 'mtn')
22
22
23 self.ui = ui
23 self.ui = ui
24 self.path = path
24 self.path = path
25 self.automatestdio = False
25 self.automatestdio = False
26 self.revs = revs
26 self.revs = revs
27
27
28 norepo = NoRepo(_("%s does not look like a monotone repository")
28 norepo = NoRepo(_("%s does not look like a monotone repository")
29 % path)
29 % path)
30 if not os.path.exists(os.path.join(path, '_MTN')):
30 if not os.path.exists(os.path.join(path, '_MTN')):
31 # Could be a monotone repository (SQLite db file)
31 # Could be a monotone repository (SQLite db file)
32 try:
32 try:
33 f = file(path, 'rb')
33 f = file(path, 'rb')
34 header = f.read(16)
34 header = f.read(16)
35 f.close()
35 f.close()
36 except IOError:
36 except IOError:
37 header = ''
37 header = ''
38 if header != 'SQLite format 3\x00':
38 if header != 'SQLite format 3\x00':
39 raise norepo
39 raise norepo
40
40
41 # regular expressions for parsing monotone output
41 # regular expressions for parsing monotone output
42 space = r'\s*'
42 space = r'\s*'
43 name = r'\s+"((?:\\"|[^"])*)"\s*'
43 name = r'\s+"((?:\\"|[^"])*)"\s*'
44 value = name
44 value = name
45 revision = r'\s+\[(\w+)\]\s*'
45 revision = r'\s+\[(\w+)\]\s*'
46 lines = r'(?:.|\n)+'
46 lines = r'(?:.|\n)+'
47
47
48 self.dir_re = re.compile(space + "dir" + name)
48 self.dir_re = re.compile(space + "dir" + name)
49 self.file_re = re.compile(space + "file" + name +
49 self.file_re = re.compile(space + "file" + name +
50 "content" + revision)
50 "content" + revision)
51 self.add_file_re = re.compile(space + "add_file" + name +
51 self.add_file_re = re.compile(space + "add_file" + name +
52 "content" + revision)
52 "content" + revision)
53 self.patch_re = re.compile(space + "patch" + name +
53 self.patch_re = re.compile(space + "patch" + name +
54 "from" + revision + "to" + revision)
54 "from" + revision + "to" + revision)
55 self.rename_re = re.compile(space + "rename" + name + "to" + name)
55 self.rename_re = re.compile(space + "rename" + name + "to" + name)
56 self.delete_re = re.compile(space + "delete" + name)
56 self.delete_re = re.compile(space + "delete" + name)
57 self.tag_re = re.compile(space + "tag" + name + "revision" +
57 self.tag_re = re.compile(space + "tag" + name + "revision" +
58 revision)
58 revision)
59 self.cert_re = re.compile(lines + space + "name" + name +
59 self.cert_re = re.compile(lines + space + "name" + name +
60 "value" + value)
60 "value" + value)
61
61
62 attr = space + "file" + lines + space + "attr" + space
62 attr = space + "file" + lines + space + "attr" + space
63 self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
63 self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
64 space + '"true"')
64 space + '"true"')
65
65
66 # cached data
66 # cached data
67 self.manifest_rev = None
67 self.manifest_rev = None
68 self.manifest = None
68 self.manifest = None
69 self.files = None
69 self.files = None
70 self.dirs = None
70 self.dirs = None
71
71
72 checktool('mtn', abort=False)
72 checktool('mtn', abort=False)
73
73
74 def mtnrun(self, *args, **kwargs):
74 def mtnrun(self, *args, **kwargs):
75 if self.automatestdio:
75 if self.automatestdio:
76 return self.mtnrunstdio(*args, **kwargs)
76 return self.mtnrunstdio(*args, **kwargs)
77 else:
77 else:
78 return self.mtnrunsingle(*args, **kwargs)
78 return self.mtnrunsingle(*args, **kwargs)
79
79
80 def mtnrunsingle(self, *args, **kwargs):
80 def mtnrunsingle(self, *args, **kwargs):
81 kwargs['d'] = self.path
81 kwargs['d'] = self.path
82 return self.run0('automate', *args, **kwargs)
82 return self.run0('automate', *args, **kwargs)
83
83
84 def mtnrunstdio(self, *args, **kwargs):
84 def mtnrunstdio(self, *args, **kwargs):
85 # Prepare the command in automate stdio format
85 # Prepare the command in automate stdio format
86 command = []
86 command = []
87 for k, v in kwargs.iteritems():
87 for k, v in kwargs.iteritems():
88 command.append("%s:%s" % (len(k), k))
88 command.append("%s:%s" % (len(k), k))
89 if v:
89 if v:
90 command.append("%s:%s" % (len(v), v))
90 command.append("%s:%s" % (len(v), v))
91 if command:
91 if command:
92 command.insert(0, 'o')
92 command.insert(0, 'o')
93 command.append('e')
93 command.append('e')
94
94
95 command.append('l')
95 command.append('l')
96 for arg in args:
96 for arg in args:
97 command += "%s:%s" % (len(arg), arg)
97 command += "%s:%s" % (len(arg), arg)
98 command.append('e')
98 command.append('e')
99 command = ''.join(command)
99 command = ''.join(command)
100
100
101 self.ui.debug("mtn: sending '%s'\n" % command)
101 self.ui.debug("mtn: sending '%s'\n" % command)
102 self.mtnwritefp.write(command)
102 self.mtnwritefp.write(command)
103 self.mtnwritefp.flush()
103 self.mtnwritefp.flush()
104
104
105 return self.mtnstdioreadcommandoutput(command)
105 return self.mtnstdioreadcommandoutput(command)
106
106
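# Worked example (made-up command) of the automate stdio encoding
# assembled above: options as 'o<len>:<key><len>:<value>e', then the
# command words as 'l<len>:<word>...e'.
def encodestdio(args, opts):
    out = []
    if opts:
        out.append('o')
        for k, v in opts.items():
            out.append('%d:%s' % (len(k), k))
            if v:
                out.append('%d:%s' % (len(v), v))
        out.append('e')
    out.append('l')
    for arg in args:
        out.append('%d:%s' % (len(arg), arg))
    out.append('e')
    return ''.join(out)

assert encodestdio(['leaves'], {}) == 'l6:leavese'
assert encodestdio(['get_file_of', 'a.txt'], {'r': 'abc'}) == \
    'o1:r3:abcel11:get_file_of5:a.txte'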
107 def mtnstdioreadpacket(self):
107 def mtnstdioreadpacket(self):
108 read = None
108 read = None
109 commandnbr = ''
109 commandnbr = ''
110 while read != ':':
110 while read != ':':
111 read = self.mtnreadfp.read(1)
111 read = self.mtnreadfp.read(1)
112 if not read:
112 if not read:
113 raise error.Abort(_('bad mtn packet - no end of commandnbr'))
113 raise error.Abort(_('bad mtn packet - no end of commandnbr'))
114 commandnbr += read
114 commandnbr += read
115 commandnbr = commandnbr[:-1]
115 commandnbr = commandnbr[:-1]
116
116
117 stream = self.mtnreadfp.read(1)
117 stream = self.mtnreadfp.read(1)
118 if stream not in 'mewptl':
118 if stream not in 'mewptl':
119 raise error.Abort(_('bad mtn packet - bad stream type %s') % stream)
119 raise error.Abort(_('bad mtn packet - bad stream type %s') % stream)
120
120
121 read = self.mtnreadfp.read(1)
121 read = self.mtnreadfp.read(1)
122 if read != ':':
122 if read != ':':
123 raise error.Abort(_('bad mtn packet - no divider before size'))
123 raise error.Abort(_('bad mtn packet - no divider before size'))
124
124
125 read = None
125 read = None
126 lengthstr = ''
126 lengthstr = ''
127 while read != ':':
127 while read != ':':
128 read = self.mtnreadfp.read(1)
128 read = self.mtnreadfp.read(1)
129 if not read:
129 if not read:
130 raise error.Abort(_('bad mtn packet - no end of packet size'))
130 raise error.Abort(_('bad mtn packet - no end of packet size'))
131 lengthstr += read
131 lengthstr += read
132 try:
132 try:
133 length = long(lengthstr[:-1])
133 length = long(lengthstr[:-1])
134 except ValueError:
134 except ValueError:
135 raise error.Abort(_('bad mtn packet - bad packet size %s')
135 raise error.Abort(_('bad mtn packet - bad packet size %s')
136 % lengthstr)
136 % lengthstr)
137
137
138 read = self.mtnreadfp.read(length)
138 read = self.mtnreadfp.read(length)
139 if len(read) != length:
139 if len(read) != length:
140 raise error.Abort(_("bad mtn packet - unable to read full packet "
140 raise error.Abort(_("bad mtn packet - unable to read full packet "
141 "read %s of %s") % (len(read), length))
141 "read %s of %s") % (len(read), length))
142
142
143 return (commandnbr, stream, length, read)
143 return (commandnbr, stream, length, read)
144
144
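# A made-up reply illustrating the packet grammar read above:
# '<commandnbr>:<stream>:<size>:<payload>'. Stream 'm' carries command
# output and the final 'l' packet carries the exit code. Error
# handling is omitted in this sketch.
def readpacket(fp):
    commandnbr = ''
    ch = fp.read(1)
    while ch != ':':
        commandnbr += ch
        ch = fp.read(1)
    stream = fp.read(1)
    fp.read(1)                      # the ':' divider
    lengthstr = ''
    ch = fp.read(1)
    while ch != ':':
        lengthstr += ch
        ch = fp.read(1)
    return commandnbr, stream, fp.read(int(lengthstr))

import io
fp = io.StringIO(u'0:m:5:hello0:l:1:0')
assert readpacket(fp) == ('0', 'm', 'hello')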
145 def mtnstdioreadcommandoutput(self, command):
145 def mtnstdioreadcommandoutput(self, command):
146 retval = []
146 retval = []
147 while True:
147 while True:
148 commandnbr, stream, length, output = self.mtnstdioreadpacket()
148 commandnbr, stream, length, output = self.mtnstdioreadpacket()
149 self.ui.debug('mtn: read packet %s:%s:%s\n' %
149 self.ui.debug('mtn: read packet %s:%s:%s\n' %
150 (commandnbr, stream, length))
150 (commandnbr, stream, length))
151
151
152 if stream == 'l':
152 if stream == 'l':
153 # End of command
153 # End of command
154 if output != '0':
154 if output != '0':
155 raise error.Abort(_("mtn command '%s' returned %s") %
155 raise error.Abort(_("mtn command '%s' returned %s") %
156 (command, output))
156 (command, output))
157 break
157 break
158 elif stream in 'ew':
158 elif stream in 'ew':
159 # Error, warning output
159 # Error, warning output
160 self.ui.warn(_('%s error:\n') % self.command)
160 self.ui.warn(_('%s error:\n') % self.command)
161 self.ui.warn(output)
161 self.ui.warn(output)
162 elif stream == 'p':
162 elif stream == 'p':
163 # Progress messages
163 # Progress messages
164 self.ui.debug('mtn: ' + output)
164 self.ui.debug('mtn: ' + output)
165 elif stream == 'm':
165 elif stream == 'm':
166 # Main stream - command output
166 # Main stream - command output
167 retval.append(output)
167 retval.append(output)
168
168
169 return ''.join(retval)
169 return ''.join(retval)
170
170
171 def mtnloadmanifest(self, rev):
171 def mtnloadmanifest(self, rev):
172 if self.manifest_rev == rev:
172 if self.manifest_rev == rev:
173 return
173 return
174 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
174 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
175 self.manifest_rev = rev
175 self.manifest_rev = rev
176 self.files = {}
176 self.files = {}
177 self.dirs = {}
177 self.dirs = {}
178
178
179 for e in self.manifest:
179 for e in self.manifest:
180 m = self.file_re.match(e)
180 m = self.file_re.match(e)
181 if m:
181 if m:
182 attr = ""
182 attr = ""
183 name = m.group(1)
183 name = m.group(1)
184 node = m.group(2)
184 node = m.group(2)
185 if self.attr_execute_re.match(e):
185 if self.attr_execute_re.match(e):
186 attr += "x"
186 attr += "x"
187 self.files[name] = (node, attr)
187 self.files[name] = (node, attr)
188 m = self.dir_re.match(e)
188 m = self.dir_re.match(e)
189 if m:
189 if m:
190 self.dirs[m.group(1)] = True
190 self.dirs[m.group(1)] = True
191
191
192 def mtnisfile(self, name, rev):
192 def mtnisfile(self, name, rev):
193 # a non-file could be a directory or a deleted or renamed file
193 # a non-file could be a directory or a deleted or renamed file
194 self.mtnloadmanifest(rev)
194 self.mtnloadmanifest(rev)
195 return name in self.files
195 return name in self.files
196
196
197 def mtnisdir(self, name, rev):
197 def mtnisdir(self, name, rev):
198 self.mtnloadmanifest(rev)
198 self.mtnloadmanifest(rev)
199 return name in self.dirs
199 return name in self.dirs
200
200
201 def mtngetcerts(self, rev):
201 def mtngetcerts(self, rev):
202 certs = {"author":"<missing>", "date":"<missing>",
202 certs = {"author":"<missing>", "date":"<missing>",
203 "changelog":"<missing>", "branch":"<missing>"}
203 "changelog":"<missing>", "branch":"<missing>"}
204 certlist = self.mtnrun("certs", rev)
204 certlist = self.mtnrun("certs", rev)
205 # mtn < 0.45:
205 # mtn < 0.45:
206 # key "test@selenic.com"
206 # key "test@selenic.com"
207 # mtn >= 0.45:
207 # mtn >= 0.45:
208 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
208 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
209 certlist = re.split('\n\n key ["\[]', certlist)
209 certlist = re.split('\n\n key ["\[]', certlist)
210 for e in certlist:
210 for e in certlist:
211 m = self.cert_re.match(e)
211 m = self.cert_re.match(e)
212 if m:
212 if m:
213 name, value = m.groups()
213 name, value = m.groups()
214 value = value.replace(r'\"', '"')
214 value = value.replace(r'\"', '"')
215 value = value.replace(r'\\', '\\')
215 value = value.replace(r'\\', '\\')
216 certs[name] = value
216 certs[name] = value
217 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
217 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
218 # and all times are stored in UTC
218 # and all times are stored in UTC
219 certs["date"] = certs["date"].split('.')[0] + " UTC"
219 certs["date"] = certs["date"].split('.')[0] + " UTC"
220 return certs
220 return certs
221
221
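# Worked example of the cert value unescaping and date normalization
# above (values invented): monotone escapes '"' and '\' inside cert
# values, and dates may carry subsecond precision, always in UTC.
value = r'say \"hi\"'
value = value.replace(r'\"', '"')
value = value.replace(r'\\', '\\')
assert value == 'say "hi"'
date = '2005-02-05T09:39:12.364306'
assert date.split('.')[0] + ' UTC' == '2005-02-05T09:39:12 UTC'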
222 # implement the converter_source interface:
222 # implement the converter_source interface:
223
223
224 def getheads(self):
224 def getheads(self):
225 if not self.revs:
225 if not self.revs:
226 return self.mtnrun("leaves").splitlines()
226 return self.mtnrun("leaves").splitlines()
227 else:
227 else:
228 return self.revs
228 return self.revs
229
229
230 def getchanges(self, rev, full):
230 def getchanges(self, rev, full):
231 if full:
231 if full:
232 raise error.Abort(_("convert from monotone do not support --full"))
232 raise error.Abort(_("convert from monotone does not support "
233 "--full"))
233 revision = self.mtnrun("get_revision", rev).split("\n\n")
234 revision = self.mtnrun("get_revision", rev).split("\n\n")
234 files = {}
235 files = {}
235 ignoremove = {}
236 ignoremove = {}
236 renameddirs = []
237 renameddirs = []
237 copies = {}
238 copies = {}
238 for e in revision:
239 for e in revision:
239 m = self.add_file_re.match(e)
240 m = self.add_file_re.match(e)
240 if m:
241 if m:
241 files[m.group(1)] = rev
242 files[m.group(1)] = rev
242 ignoremove[m.group(1)] = rev
243 ignoremove[m.group(1)] = rev
243 m = self.patch_re.match(e)
244 m = self.patch_re.match(e)
244 if m:
245 if m:
245 files[m.group(1)] = rev
246 files[m.group(1)] = rev
246 # Delete/rename is handled later when the convert engine
247 # Delete/rename is handled later when the convert engine
247 # discovers an IOError exception from getfile,
248 # discovers an IOError exception from getfile,
248 # but only if we add the "from" file to the list of changes.
249 # but only if we add the "from" file to the list of changes.
249 m = self.delete_re.match(e)
250 m = self.delete_re.match(e)
250 if m:
251 if m:
251 files[m.group(1)] = rev
252 files[m.group(1)] = rev
252 m = self.rename_re.match(e)
253 m = self.rename_re.match(e)
253 if m:
254 if m:
254 toname = m.group(2)
255 toname = m.group(2)
255 fromname = m.group(1)
256 fromname = m.group(1)
256 if self.mtnisfile(toname, rev):
257 if self.mtnisfile(toname, rev):
257 ignoremove[toname] = 1
258 ignoremove[toname] = 1
258 copies[toname] = fromname
259 copies[toname] = fromname
259 files[toname] = rev
260 files[toname] = rev
260 files[fromname] = rev
261 files[fromname] = rev
261 elif self.mtnisdir(toname, rev):
262 elif self.mtnisdir(toname, rev):
262 renameddirs.append((fromname, toname))
263 renameddirs.append((fromname, toname))
263
264
264 # Directory renames can be handled only once we have recorded
265 # Directory renames can be handled only once we have recorded
265 # all new files
266 # all new files
266 for fromdir, todir in renameddirs:
267 for fromdir, todir in renameddirs:
267 renamed = {}
268 renamed = {}
268 for tofile in self.files:
269 for tofile in self.files:
269 if tofile in ignoremove:
270 if tofile in ignoremove:
270 continue
271 continue
271 if tofile.startswith(todir + '/'):
272 if tofile.startswith(todir + '/'):
272 renamed[tofile] = fromdir + tofile[len(todir):]
273 renamed[tofile] = fromdir + tofile[len(todir):]
273 # Avoid chained moves like:
274 # Avoid chained moves like:
274 # d1(/a) => d3/d1(/a)
275 # d1(/a) => d3/d1(/a)
275 # d2 => d3
276 # d2 => d3
276 ignoremove[tofile] = 1
277 ignoremove[tofile] = 1
        for tofile, fromfile in renamed.items():
            self.ui.debug(_("copying file in renamed directory "
                            "from '%s' to '%s'")
                          % (fromfile, tofile), '\n')
            files[tofile] = rev
            copies[tofile] = fromfile
        for fromfile in renamed.values():
            files[fromfile] = rev

        return (files.items(), copies, set())

    def getfile(self, name, rev):
        if not self.mtnisfile(name, rev):
            return None, None
        try:
            data = self.mtnrun("get_file_of", name, r=rev)
        except Exception:
            return None, None
        self.mtnloadmanifest(rev)
        node, attr = self.files.get(name, (None, ""))
        return data, attr

    def getcommit(self, rev):
        extra = {}
        certs = self.mtngetcerts(rev)
        if certs.get('suspend') == certs["branch"]:
            extra['close'] = 1
        return commit(
            author=certs["author"],
            date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
            desc=certs["changelog"],
            rev=rev,
            parents=self.mtnrun("parents", rev).splitlines(),
            branch=certs["branch"],
            extra=extra)

    def gettags(self):
        tags = {}
        for e in self.mtnrun("tags").split("\n\n"):
            m = self.tag_re.match(e)
            if m:
                tags[m.group(1)] = m.group(2)
        return tags

    def getchangedfiles(self, rev, i):
        # This function is only needed to support --filemap
        # ... and we don't support that
        raise NotImplementedError

    def before(self):
        # Check if we have a new enough version to use automate stdio
        version = 0.0
        try:
            versionstr = self.mtnrunsingle("interface_version")
            version = float(versionstr)
        except Exception:
            raise error.Abort(_("unable to determine mtn automate interface "
                                "version"))

        if version >= 12.0:
            self.automatestdio = True
            self.ui.debug("mtn automate version %s - using automate stdio\n" %
                          version)

            # launch the long-running automate stdio process
            self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
                                                         '-d', self.path)
            # read the headers
            read = self.mtnreadfp.readline()
            if read != 'format-version: 2\n':
                raise error.Abort(_('mtn automate stdio header unexpected: %s')
                                  % read)
            while read != '\n':
                read = self.mtnreadfp.readline()
                if not read:
                    raise error.Abort(_("failed to reach end of mtn automate "
                                        "stdio headers"))
        else:
            self.ui.debug("mtn automate version %s - not using automate stdio "
                          "(automate >= 12.0 - mtn >= 0.46 is needed)\n" %
                          version)

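    # Protocol note (illustrative, inferred from the checks above): the
    # automate stdio greeting is a block of "key: value" header lines
    # terminated by a single empty line, for example:
    #
    #     format-version: 2
    #     <empty line>
    #
    # which is why before() first insists on seeing "format-version: 2"
    # and then drains header lines until it reads a lone '\n'.
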
    def after(self):
        if self.automatestdio:
            self.mtnwritefp.close()
            self.mtnwritefp = None
            self.mtnreadfp.close()
            self.mtnreadfp = None

@@ -1,290 +1,290 @@
# Perforce source for convert extension.
#
# Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from mercurial import util, error
from mercurial.i18n import _

from common import commit, converter_source, checktool, NoRepo
import marshal
import re

def loaditer(f):
    "Yield the dictionary objects generated by p4"
    try:
        while True:
            d = marshal.load(f)
            if not d:
                break
            yield d
    except EOFError:
        pass

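# Usage sketch (illustrative, not part of this module): loaditer() consumes
# the stream produced by "p4 -G", which is a sequence of marshalled Python
# dictionaries, one per record. For example, assuming a depot path:
#
#     stdout = util.popen('p4 -G changes -m 5 //depot/...', mode='rb')
#     for d in loaditer(stdout):
#         print d.get('change'), d.get('desc', '').split('\n')[0]
#
# Iteration stops cleanly at end of stream or at an empty record.
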
def decodefilename(filename):
    """Perforce escapes special characters @, #, *, or %
    with %40, %23, %2A, or %25 respectively

    >>> decodefilename('portable-net45%252Bnetcore45%252Bwp8%252BMonoAndroid')
    'portable-net45%2Bnetcore45%2Bwp8%2BMonoAndroid'
    >>> decodefilename('//Depot/Directory/%2525/%2523/%23%40.%2A')
    '//Depot/Directory/%25/%23/#@.*'
    """
    replacements = [('%2A', '*'), ('%23', '#'), ('%40', '@'), ('%25', '%')]
    for k, v in replacements:
        filename = filename.replace(k, v)
    return filename

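# For illustration only, a hypothetical inverse helper (no such function
# exists in this module): encoding must escape '%' first so the other
# escapes are not double-decoded, which mirrors why decodefilename()
# replaces '%25' last.
#
#     def encodefilename(filename):
#         for k, v in [('%', '%25'), ('@', '%40'), ('#', '%23'), ('*', '%2A')]:
#             filename = filename.replace(k, v)
#         return filename
#
#     >>> encodefilename('//Depot/Directory/%25/#@.*')
#     '//Depot/Directory/%2525/%23%40.%2A'
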
class p4_source(converter_source):
    def __init__(self, ui, path, revs=None):
        # avoid import cycle
        import convcmd

        super(p4_source, self).__init__(ui, path, revs=revs)

        if "/" in path and not path.startswith('//'):
            raise NoRepo(_('%s does not look like a P4 repository') % path)

        checktool('p4', abort=False)

        self.p4changes = {}
        self.heads = {}
        self.changeset = {}
        self.files = {}
        self.copies = {}
        self.tags = {}
        self.lastbranch = {}
        self.parent = {}
        self.encoding = self.ui.config('convert', 'p4.encoding',
                                       default=convcmd.orig_encoding)
        self.depotname = {} # mapping from local name to depot name
        self.localname = {} # mapping from depot name to local name
        self.re_type = re.compile(
            "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
            "(\+\w+)?$")
        self.re_keywords = re.compile(
            r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
            r":[^$\n]*\$")
        self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")

        if revs and len(revs) > 1:
            raise error.Abort(_("p4 source does not support specifying "
                                "multiple revisions"))
        self._parse(ui, path)

    def _parse_view(self, path):
        "Read changes affecting the path"
        cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
        stdout = util.popen(cmd, mode='rb')
        for d in loaditer(stdout):
            c = d.get("change", None)
            if c:
                self.p4changes[c] = True

    def _parse(self, ui, path):
        "Prepare list of P4 filenames and revisions to import"
        ui.status(_('reading p4 views\n'))

        # read client spec or view
        if "/" in path:
            self._parse_view(path)
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]:""}
            else:
                views = {"//": ""}
        else:
            cmd = 'p4 -G client -o %s' % util.shellquote(path)
            clientspec = marshal.load(util.popen(cmd, mode='rb'))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    self._parse_view(sview)
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

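        # Worked example of the mapping above (illustrative): a client view
        # line such as
        #
        #     View0: //depot/proj/... //myclient/sub/proj/...
        #
        # trims both sides to "//depot/proj/" and "//myclient/sub/proj/";
        # dropping the leading "//" and the client name up to the first "/"
        # leaves "sub/proj/", so views["//depot/proj/"] = "sub/proj/".
        #
        #     >>> cview = '//myclient/sub/proj/...'[:-3]
        #     >>> cview = cview[2:]
        #     >>> cview[cview.find('/') + 1:]
        #     'sub/proj/'
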
        # list of changes that affect our source files
        self.p4changes = self.p4changes.keys()
        self.p4changes.sort(key=int)

        # list with depot pathnames, longest first
        vieworder = views.keys()
        vieworder.sort(key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config('convert', 'p4.startrev', default=0)
        self.p4changes = [x for x in self.p4changes
                          if ((not startrev or int(x) >= int(startrev)) and
                              (not self.revs or int(x) <= int(self.revs[0])))]

        # now read the full changelists to get the list of file revisions
        ui.status(_('collecting p4 changelists\n'))
        lastid = None
        for change in self.p4changes:
            cmd = "p4 -G describe -s %s" % change
            stdout = util.popen(cmd, mode='rb')
            d = marshal.load(stdout)
            desc = self.recode(d.get("desc", ""))
            shortdesc = desc.split("\n", 1)[0]
            t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + '\n')

            if lastid:
                parents = [lastid]
            else:
                parents = []

            date = (int(d["time"]), 0) # timezone not set
            c = commit(author=self.recode(d["user"]),
                       date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
                       parents=parents, desc=desc, branch=None,
                       extra={"p4": change})

            files = []
            copies = {}
            copiedfiles = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.lower().startswith(v.lower()):
                        filename = decodefilename(views[v] + oldname[len(v):])
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    self.depotname[filename] = oldname
                    if (d.get("action%d" % i) == "move/add"):
                        copiedfiles.append(filename)
                    self.localname[oldname] = filename
                i += 1

            # Collect information about copied files
            for filename in copiedfiles:
                oldname = self.depotname[filename]

                flcmd = 'p4 -G filelog %s' \
                        % util.shellquote(oldname)
                flstdout = util.popen(flcmd, mode='rb')

                copiedfilename = None
                for d in loaditer(flstdout):
                    copiedoldname = None

                    i = 0
                    while ("change%d" % i) in d:
                        if (d["change%d" % i] == change and
                            d["action%d" % i] == "move/add"):
                            j = 0
                            while ("file%d,%d" % (i, j)) in d:
                                if d["how%d,%d" % (i, j)] == "moved from":
                                    copiedoldname = d["file%d,%d" % (i, j)]
                                    break
                                j += 1
                        i += 1

                    if copiedoldname and copiedoldname in self.localname:
                        copiedfilename = self.localname[copiedoldname]
                        break

                if copiedfilename:
                    copies[filename] = copiedfilename
                else:
                    ui.warn(_("cannot find source for copied file: %s@%s\n")
                            % (filename, change))

            self.changeset[change] = c
            self.files[change] = files
            self.copies[change] = copies
            lastid = change

        if lastid:
            self.heads = [lastid]

    def getheads(self):
        return self.heads

    def getfile(self, name, rev):
        cmd = 'p4 -G print %s' \
              % util.shellquote("%s#%s" % (self.depotname[name], rev))

        lasterror = None
        while True:
            stdout = util.popen(cmd, mode='rb')

            mode = None
            contents = []
            keywords = None

            for d in loaditer(stdout):
                code = d["code"]
                data = d.get("data")

                if code == "error":
                    # if this is the first time an error happened,
                    # re-attempt getting the file
                    if not lasterror:
                        lasterror = IOError(d["generic"], data)
                        # this will exit the inner-most for-loop
                        break
                    else:
                        raise lasterror

                elif code == "stat":
                    action = d.get("action")
                    if action in ["purge", "delete", "move/delete"]:
                        return None, None
                    p4type = self.re_type.match(d["type"])
                    if p4type:
                        mode = ""
                        flags = ((p4type.group(1) or "")
                                 + (p4type.group(3) or ""))
                        if "x" in flags:
                            mode = "x"
                        if p4type.group(2) == "symlink":
                            mode = "l"
                        if "ko" in flags:
                            keywords = self.re_keywords_old
                        elif "k" in flags:
                            keywords = self.re_keywords

                elif code == "text" or code == "binary":
                    contents.append(data)

                lasterror = None

            if not lasterror:
                break

        if mode is None:
            return None, None

        contents = ''.join(contents)

        if keywords:
            contents = keywords.sub("$\\1$", contents)
        if mode == "l" and contents.endswith("\n"):
            contents = contents[:-1]

        return contents, mode

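    # Example of the keyword collapsing above (illustrative): for files with
    # a "+k" type flag, RCS-style keywords expanded by the server are folded
    # back to their bare form so converted contents stay stable across
    # revisions:
    #
    #     >>> self.re_keywords.sub("$\\1$", "$Id: //depot/a.c#3 $, $Change: 7 $")
    #     '$Id$, $Change$'
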
    def getchanges(self, rev, full):
        if full:
-            raise error.Abort(_("convert from p4 do not support --full"))
+            raise error.Abort(_("convert from p4 does not support --full"))
        return self.files[rev], self.copies[rev], set()

    def getcommit(self, rev):
        return self.changeset[rev]

    def gettags(self):
        return self.tags

    def getchangedfiles(self, rev, i):
        return sorted([x[0] for x in self.files[rev]])
@@ -1,1850 +1,1850 @@
# exchange.py - utility to exchange data between repos.
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
from node import hex, nullid
import errno, urllib, urllib2
import util, scmutil, changegroup, base85, error
import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
import lock as lockmod
import streamclone
import sslutil
import tags
import url as urlmod

# Maps bundle compression human names to internal representation.
_bundlespeccompressions = {'none': None,
                           'bzip2': 'BZ',
                           'gzip': 'GZ',
                          }

# Maps bundle version human names to changegroup versions.
_bundlespeccgversions = {'v1': '01',
                         'v2': '02',
                         'packed1': 's1',
                         'bundle2': '02', #legacy
                        }

def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urllib.unquote(key)
            value = urllib.unquote(value)
            params[key] = value

        return version, params


    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        compression, version = spec.split('-', 1)

        if compression not in _bundlespeccompressions:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in _bundlespeccompressions:
            compression = spec
            version = 'v1'
            if 'generaldelta' in repo.requirements:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        compression = _bundlespeccompressions[compression]
        version = _bundlespeccgversions[version]
    return compression, version, params

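# A few concrete instances of the grammar described above (illustrative;
# with the default externalnames=False the names are mapped through the
# tables at the top of this module):
#
#     >>> parsebundlespec(repo, 'gzip-v2')
#     ('GZ', '02', {})
#     >>> parsebundlespec(repo, 'none-packed1;requirements=revlogv1')
#     (None, 's1', {'requirements': 'revlogv1'})
#     >>> parsebundlespec(repo, 'bzip2', strict=False)
#     ('BZ', '02', {})
#
# The last call assumes a generaldelta repo; on an older repo format the
# version would default to '01'.
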
def readbundle(ui, fh, fname, vfs=None):
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))

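# Header sniffing summary (illustrative): the first four bytes pick the
# unbundler.
#
#     'HG10' (+ 2-byte compression: 'BZ', 'GZ' or 'UN')   -> cg1unpacker
#     'HG2?' (any version starting with '2', e.g. 'HG20') -> bundle2 unbundler
#     'HGS1'                                              -> streamclone applier
#     leading '\0' (headerless)  -> treated as uncompressed 'HG10'
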
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker format.
    """
    if markers:
        remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
        version = obsolete.commonversion(remoteversions)
        if version is None:
-            raise ValueError('bundler do not support common obsmarker format')
+            raise ValueError('bundler does not support common obsmarker format')
        stream = obsolete.encodemarkers(markers, True, version=version)
        return bundler.newpart('obsmarkers', data=stream)
    return None

def _canusebundle2(op):
    """return true if a pull/push can use bundle2

    Feel free to nuke this function when we drop the experimental option"""
    return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
            and op.remote.capable('bundle2'))


class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # steps already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed alongside the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common heads are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changesets filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

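    # Worked example (illustrative): suppose the remote has head B, we try
    # to push head D, and the push fails:
    #
    #     A -- B -- C -- D      (A, B common; C, D missing)
    #
    # With revs = [D]: D is not in common, so the list comprehension above
    # contributes nothing, but B is a commonheads parent of the missing
    # root C, so the 'parents(roots(%ln))' revset yields B and
    # fallbackheads ends up as [B] == heads(::D and ::B), as intended.
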
    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

# mapping of messages used when pushing bookmarks
bookmsgmap = {'update': (_("updating bookmark %s\n"),
                         _('updating bookmark %s failed!\n')),
              'export': (_("exporting bookmark %s\n"),
                         _('exporting bookmark %s failed!\n')),
              'delete': (_("deleting remote bookmark %s\n"),
                         _('deleting remote bookmark %s failed!\n')),
              }


def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **opargs)
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    # there are two ways to push to a remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    localwlock = locallock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if _canusebundle2(pushop) and maypushback:
            localwlock = pushop.repo.wlock()
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError as err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        if pushop.locallocked:
            pushop.trmanager = transactionmanager(pushop.repo,
                                                  'push-response',
                                                  pushop.remote.url())
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _canusebundle2(pushop):
                _pushbundle2(pushop)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
            if pushop.trmanager:
                pushop.trmanager.close()
    finally:
        if pushop.trmanager:
            pushop.trmanager.release()
        if locallock is not None:
            locallock.release()
        if localwlock is not None:
            localwlock.release()

    return pushop

# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """decorator for a function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for a new step; if you want to wrap a step
    from an extension, change the pushdiscoverymapping dictionary directly."""
    def dec(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return dec

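# Hypothetical usage sketch (no such step exists): an extension could
# register an extra discovery step like this; it would run after the
# built-in steps, in registration order:
#
#     @pushdiscovery('mycheck')
#     def _pushdiscoverymycheck(pushop):
#         if pushop.force:
#             pushop.ui.note('skipping mycheck on forced push\n')
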
416 def _pushdiscovery(pushop):
416 def _pushdiscovery(pushop):
417 """Run all discovery steps"""
417 """Run all discovery steps"""
418 for stepname in pushdiscoveryorder:
418 for stepname in pushdiscoveryorder:
419 step = pushdiscoverymapping[stepname]
419 step = pushdiscoverymapping[stepname]
420 step(pushop)
420 step(pushop)
421
421
422 @pushdiscovery('changeset')
422 @pushdiscovery('changeset')
423 def _pushdiscoverychangeset(pushop):
423 def _pushdiscoverychangeset(pushop):
424 """discover the changeset that need to be pushed"""
424 """discover the changeset that need to be pushed"""
425 fci = discovery.findcommonincoming
425 fci = discovery.findcommonincoming
426 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
426 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
427 common, inc, remoteheads = commoninc
427 common, inc, remoteheads = commoninc
428 fco = discovery.findcommonoutgoing
428 fco = discovery.findcommonoutgoing
429 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
429 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
430 commoninc=commoninc, force=pushop.force)
430 commoninc=commoninc, force=pushop.force)
431 pushop.outgoing = outgoing
431 pushop.outgoing = outgoing
432 pushop.remoteheads = remoteheads
432 pushop.remoteheads = remoteheads
433 pushop.incoming = inc
433 pushop.incoming = inc
434
434
435 @pushdiscovery('phase')
435 @pushdiscovery('phase')
436 def _pushdiscoveryphase(pushop):
436 def _pushdiscoveryphase(pushop):
437 """discover the phase that needs to be pushed
437 """discover the phase that needs to be pushed
438
438
439 (computed for both success and failure case for changesets push)"""
439 (computed for both success and failure case for changesets push)"""
440 outgoing = pushop.outgoing
440 outgoing = pushop.outgoing
441 unfi = pushop.repo.unfiltered()
441 unfi = pushop.repo.unfiltered()
442 remotephases = pushop.remote.listkeys('phases')
442 remotephases = pushop.remote.listkeys('phases')
443 publishing = remotephases.get('publishing', False)
443 publishing = remotephases.get('publishing', False)
444 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
444 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
445 and remotephases # server supports phases
445 and remotephases # server supports phases
446 and not pushop.outgoing.missing # no changesets to be pushed
446 and not pushop.outgoing.missing # no changesets to be pushed
447 and publishing):
447 and publishing):
448 # When:
448 # When:
449 # - this is a subrepo push
449 # - this is a subrepo push
450 # - and remote support phase
450 # - and remote support phase
451 # - and no changeset are to be pushed
451 # - and no changeset are to be pushed
452 # - and remote is publishing
452 # - and remote is publishing
453 # We may be in issue 3871 case!
453 # We may be in issue 3871 case!
454 # We drop the possible phase synchronisation done by
454 # We drop the possible phase synchronisation done by
455 # courtesy to publish changesets possibly locally draft
455 # courtesy to publish changesets possibly locally draft
456 # on the remote.
456 # on the remote.
457 remotephases = {'publishing': 'True'}
457 remotephases = {'publishing': 'True'}
458 ana = phases.analyzeremotephases(pushop.repo,
458 ana = phases.analyzeremotephases(pushop.repo,
459 pushop.fallbackheads,
459 pushop.fallbackheads,
460 remotephases)
460 remotephases)
461 pheads, droots = ana
461 pheads, droots = ana
462 extracond = ''
462 extracond = ''
463 if not publishing:
463 if not publishing:
464 extracond = ' and public()'
464 extracond = ' and public()'
465 revset = 'heads((%%ln::%%ln) %s)' % extracond
465 revset = 'heads((%%ln::%%ln) %s)' % extracond
466 # Get the list of all revs draft on remote by public here.
466 # Get the list of all revs draft on remote by public here.
467 # XXX Beware that revset break if droots is not strictly
467 # XXX Beware that revset break if droots is not strictly
468 # XXX root we may want to ensure it is but it is costly
468 # XXX root we may want to ensure it is but it is costly
469 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
469 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
470 if not outgoing.missing:
470 if not outgoing.missing:
471 future = fallback
471 future = fallback
472 else:
472 else:
473 # adds changeset we are going to push as draft
473 # adds changeset we are going to push as draft
474 #
474 #
475 # should not be necessary for publishing server, but because of an
475 # should not be necessary for publishing server, but because of an
476 # issue fixed in xxxxx we have to do it anyway.
476 # issue fixed in xxxxx we have to do it anyway.
477 fdroots = list(unfi.set('roots(%ln + %ln::)',
477 fdroots = list(unfi.set('roots(%ln + %ln::)',
478 outgoing.missing, droots))
478 outgoing.missing, droots))
479 fdroots = [f.node() for f in fdroots]
479 fdroots = [f.node() for f in fdroots]
480 future = list(unfi.set(revset, fdroots, pushop.futureheads))
480 future = list(unfi.set(revset, fdroots, pushop.futureheads))
481 pushop.outdatedphases = future
481 pushop.outdatedphases = future
482 pushop.fallbackoutdatedphases = fallback
482 pushop.fallbackoutdatedphases = fallback
483
483
484 @pushdiscovery('obsmarker')
484 @pushdiscovery('obsmarker')
485 def _pushdiscoveryobsmarkers(pushop):
485 def _pushdiscoveryobsmarkers(pushop):
486 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
486 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
487 and pushop.repo.obsstore
487 and pushop.repo.obsstore
488 and 'obsolete' in pushop.remote.listkeys('namespaces')):
488 and 'obsolete' in pushop.remote.listkeys('namespaces')):
489 repo = pushop.repo
489 repo = pushop.repo
490 # very naive computation, that can be quite expensive on big repo.
490 # very naive computation, that can be quite expensive on big repo.
491 # However: evolution is currently slow on them anyway.
491 # However: evolution is currently slow on them anyway.
492 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
492 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
493 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
493 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
494
494
495 @pushdiscovery('bookmarks')
495 @pushdiscovery('bookmarks')
496 def _pushdiscoverybookmarks(pushop):
496 def _pushdiscoverybookmarks(pushop):
497 ui = pushop.ui
497 ui = pushop.ui
498 repo = pushop.repo.unfiltered()
498 repo = pushop.repo.unfiltered()
499 remote = pushop.remote
499 remote = pushop.remote
500 ui.debug("checking for updated bookmarks\n")
500 ui.debug("checking for updated bookmarks\n")
501 ancestors = ()
501 ancestors = ()
502 if pushop.revs:
502 if pushop.revs:
503 revnums = map(repo.changelog.rev, pushop.revs)
503 revnums = map(repo.changelog.rev, pushop.revs)
504 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
504 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
505 remotebookmark = remote.listkeys('bookmarks')
505 remotebookmark = remote.listkeys('bookmarks')
506
506
507 explicit = set(pushop.bookmarks)
507 explicit = set(pushop.bookmarks)
508
508
509 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
509 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
510 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
510 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
511 for b, scid, dcid in advsrc:
511 for b, scid, dcid in advsrc:
512 if b in explicit:
512 if b in explicit:
513 explicit.remove(b)
513 explicit.remove(b)
514 if not ancestors or repo[scid].rev() in ancestors:
514 if not ancestors or repo[scid].rev() in ancestors:
515 pushop.outbookmarks.append((b, dcid, scid))
515 pushop.outbookmarks.append((b, dcid, scid))
516 # search added bookmark
516 # search added bookmark
517 for b, scid, dcid in addsrc:
517 for b, scid, dcid in addsrc:
518 if b in explicit:
518 if b in explicit:
519 explicit.remove(b)
519 explicit.remove(b)
520 pushop.outbookmarks.append((b, '', scid))
520 pushop.outbookmarks.append((b, '', scid))
521 # search for overwritten bookmark
521 # search for overwritten bookmark
522 for b, scid, dcid in advdst + diverge + differ:
522 for b, scid, dcid in advdst + diverge + differ:
523 if b in explicit:
523 if b in explicit:
524 explicit.remove(b)
524 explicit.remove(b)
525 pushop.outbookmarks.append((b, dcid, scid))
525 pushop.outbookmarks.append((b, dcid, scid))
526 # search for bookmark to delete
526 # search for bookmark to delete
527 for b, scid, dcid in adddst:
527 for b, scid, dcid in adddst:
528 if b in explicit:
528 if b in explicit:
529 explicit.remove(b)
529 explicit.remove(b)
530 # treat as "deleted locally"
530 # treat as "deleted locally"
531 pushop.outbookmarks.append((b, dcid, ''))
531 pushop.outbookmarks.append((b, dcid, ''))
532 # identical bookmarks shouldn't get reported
532 # identical bookmarks shouldn't get reported
533 for b, scid, dcid in same:
533 for b, scid, dcid in same:
534 if b in explicit:
534 if b in explicit:
535 explicit.remove(b)
535 explicit.remove(b)
536
536
537 if explicit:
537 if explicit:
538 explicit = sorted(explicit)
538 explicit = sorted(explicit)
539 # we should probably list all of them
539 # we should probably list all of them
540 ui.warn(_('bookmark %s does not exist on the local '
540 ui.warn(_('bookmark %s does not exist on the local '
541 'or remote repository!\n') % explicit[0])
541 'or remote repository!\n') % explicit[0])
542 pushop.bkresult = 2
542 pushop.bkresult = 2
543
543
544 pushop.outbookmarks.sort()
544 pushop.outbookmarks.sort()
545
545
def _pushcheckoutgoing(pushop):
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore is empty --> no obsolete markers,
        # so we can save the iteration
        if unfi.obsstore:
            # these messages are kept short for the 80-char line limit
            mso = _("push includes obsolete changeset: %s!")
            mst = {"unstable": _("push includes unstable changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "divergent": _("push includes divergent changeset: %s!")}
            # If we are going to push and there is at least one obsolete
            # or unstable changeset in missing, then at least one of the
            # missing heads will be obsolete or unstable. So checking
            # heads only is OK.
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.troubled():
                    raise error.Abort(mst[ctx.troubles()[0]] % ctx)

    # internal config: bookmarks.pushing
    newbm = pushop.ui.configlist('bookmarks', 'pushing')
    discovery.checkheads(unfi, pushop.remote, outgoing,
                         pushop.remoteheads,
                         pushop.newbranch,
                         bool(pushop.incoming),
                         newbm)
    return True

# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, change the b2partsgenmapping dictionary directly."""
    def dec(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec
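
# A minimal usage sketch (the step name 'myext:demo' and its payload are
# hypothetical, not part of this module): an extension registering an extra
# outgoing part would follow the same pattern as the built-in steps below:
#
#   @b2partsgenerator('myext:demo')
#   def _pushb2demo(pushop, bundler):
#       if 'myext:demo' in pushop.stepsdone:
#           return
#       pushop.stepsdone.add('myext:demo')
#       bundler.newpart('myext:demo', data='payload')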

def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    if not pushop.force:
        bundler.newpart('check:heads', data=iter(pushop.remoteheads))

@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = None
    cgversions = b2caps.get('changegroup')
    if not cgversions:  # 3.1 and 3.2 ship with an empty value
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing)
    else:
        cgversions = [v for v in cgversions if v in changegroup.packermap]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing,
                                                version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if version is not None:
        cgpart.addparam('version', version)
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
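
# Version negotiation sketch for the code above: a remote advertising
# changegroup versions such as ['01', '02'] yields version = max(...) = '02',
# while the empty value shipped by 3.1/3.2 servers takes the version-less
# fallback path.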

@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply

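# Sketch of one encoded pushkey part built above, with the standard phase
# numbering (public=0, draft=1); the node hex is hypothetical:
#
#   namespace='phases', key='f1e2d3...', old='1', new='0'
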
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        markers = sorted(pushop.outobsmarkers)
        buildobsmarkerspart(bundler, markers)

@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for parts we did not generate
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply


def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(stream, ['force'], 'push')
        except error.BundleValueError as exc:
            raise error.Abort('missing support for %s' % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort('missing support for %s' % exc)
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)

def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())

def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None  # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote supports phases
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases:  # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else:  # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed through bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fall back to the independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)

def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
    else:
        # repo is not locked, do not change any phases!
        # Inform the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)

def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        rslts = []
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)

def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery may have set the value from an invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1

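# Sketch of the action selection above, driven by the (old, new) pushkey
# values from pushop.outbookmarks:
#
#   old == ''  -> 'export'  (bookmark is new on the remote)
#   new == ''  -> 'delete'  (bookmark is removed from the remote)
#   otherwise  -> 'update'  (bookmark moved)
#
# bookmsgmap, defined earlier in this module, maps each action to a
# (success, failure) message pair used by ui.status/ui.warn.
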
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new pulloperation should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revisions we try to pull (None is "all")
        self.heads = heads
        # bookmarks pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of steps already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled everything possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        return _canusebundle2(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()

class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""
    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()

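# Minimal usage sketch, mirroring how pull() below drives the manager:
#
#   trmanager = transactionmanager(repo, 'pull', remote.url())
#   try:
#       tr = trmanager.transaction()  # created lazily on first use
#       ...  # apply incoming data inside the transaction
#       trmanager.close()
#   finally:
#       trmanager.release()  # aborts if close() was never reached
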
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    lock = pullop.repo.lock()
    try:
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        pullop.trmanager.release()
        lock.release()

    return pullop

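# Example invocation (a sketch; obtaining 'remote' as a peer instance, e.g.
# via hg.peer, happens elsewhere, and the bookmark name is hypothetical):
#
#   pullop = pull(repo, remote, bookmarks=['@'])
#   if pullop.cgresult == 0:
#       repo.ui.status('no changesets were pulled\n')
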
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for a new step; if you want to wrap a step
    from an extension, change the pulldiscoverymapping dictionary directly."""
    def dec(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec
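
# Usage sketch (the step name 'myext:extra' and the listkeys namespace are
# hypothetical): an extension adding its own discovery step would register
# it like this:
#
#   @pulldiscovery('myext:extra')
#   def _pulldiscoveryextra(pullop):
#       pullop.myextdata = pullop.remote.listkeys('myext')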

def _pulldiscovery(pullop):
    """Run all discovery steps"""
    for stepname in pulldiscoveryorder:
        step = pulldiscoverymapping[stepname]
        step(pullop)

@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but let's be nice
        # with new implementations.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')


@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; it will be changed to handle
    all discovery at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, let's drop it from the
        # unknown remote heads and put it back in common.
        #
        # This is a hackish solution to catch most of the "common but
        # locally hidden" situations. We do not perform discovery on the
        # unfiltered repository because it would end up doing a pathological
        # number of round trips for a huge number of changesets we do not
        # care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads

def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroups."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}

    streaming, streamreqs = streamclone.canperformstreamclone(pullop)

    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in pullop.remotebundle2caps:
        kwargs['listkeys'] = ['phases']
        if pullop.remotebookmarks is None:
            # make sure to always include bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            kwargs['listkeys'].append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError as exc:
        raise error.Abort('missing support for %s' % exc)

    if pullop.fetch:
        results = [cg['return'] for cg in op.records['changegroup']]
        pullop.cgresult = changegroup.combineresults(results)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)

def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call"""
    pass

def _pullchangeset(pullop):
    """pull changesets from unbundle into the local repo"""
    # We delay opening the transaction as long as possible so we don't open
    # a transaction for nothing and don't break a future useful rollback
    # call.
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())

def _pullphase(pullop):
    # Get remote phases data from remote
    if 'phases' in pullop.stepsdone:
        return
    remotephases = pullop.remote.listkeys('phases')
    _pullapplyphases(pullop, remotephases)

def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing; all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)

def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    remotebookmarks = pullop.remotebookmarks
    bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)

def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is a function that returns the pull transaction,
    creating one if necessary. We return the transaction to inform the
    calling code that a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            pullop.repo.invalidatevolatilesets()
    return tr

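# Illustrative sketch (hedged): the 'obsolete' listkeys namespace carries
# base85-encoded marker chunks under 'dump0', 'dump1', ... keys, so the
# loop above effectively does
#
#     remoteobs = {'dump0': '<base85 blob>', 'dump1': '<base85 blob>'}
#     for key in sorted(remoteobs, reverse=True):  # 'dump1', then 'dump0'
#         pullop.repo.obsstore.mergemarkers(
#             tr, base85.b85decode(remoteobs[key]))
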
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    caps = set(['HG20'])
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    caps.add('bundle2=' + urllib.quote(capsblob))
    return caps

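# Example result (hedged; the quoted blob varies with the repository's
# actual bundle2 capabilities):
#
#     caps20to10(repo)
#     # -> set(['HG20', 'bundle2=HG20%0Achangegroup%3D...'])
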
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    """decorator for functions generating a bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, modify the getbundle2partsmapping dictionary directly."""
    def dec(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return dec

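# Example (hypothetical; the 'echo' step and _getbundleechopart are
# illustrative names, not part of Mercurial): a third-party extension
# could register a new getbundle step with the decorator above:
#
#     @getbundle2partsgenerator('echo')
#     def _getbundleechopart(bundler, repo, source, bundlecaps=None,
#                            b2caps=None, **kwargs):
#         """add an advisory 'echo' part carrying the pull source"""
#         bundler.newpart('echo', data=source, mandatory=False)
#
# The step runs when getbundle() below iterates getbundle2partsorder;
# passing idx=0 would make it run before the built-in steps instead.
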
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kinds of parts)

    Could be a bundle HG10 or a bundle HG20 depending on the bundlecaps
    passed. For now, the bundle can contain only a changegroup, but this
    will change when more part types become available for bundle2.

    This is different from changegroup.getchangegroup, which only returns
    an HG10 changegroup bundle. They may eventually get reunited in the
    future when we have a clearer idea of the API we want to use to query
    different data.

    The implementation is at a very early stage and will get massive rework
    when the bundle API is refined.
    """
    # bundle10 case
    usebundle2 = False
    if bundlecaps is not None:
        usebundle2 = any((cap.startswith('HG2') for cap in bundlecaps))
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return changegroup.getchangegroup(repo, source, heads=heads,
                                          common=common, bundlecaps=bundlecaps)

    # bundle20 case
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **kwargs)

    return util.chunkbuffer(bundler.getchunks())

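# Usage sketch (hedged; argument values are illustrative): a server
# answering a bundle2-capable client would call roughly
#
#     data = getbundle(repo, 'serve', heads=repo.heads(), common=None,
#                      bundlecaps=set(['HG20']), cg=True,
#                      listkeys=['phases', 'bookmarks'])
#
# Without an 'HG2x' entry in bundlecaps, the same call falls into the
# bundle10 branch above and returns a plain changegroup instead.
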
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cg = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = None
        cgversions = b2caps.get('changegroup')
        getcgkwargs = {}
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            cgversions = [v for v in cgversions if v in changegroup.packermap]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = getcgkwargs['version'] = max(cgversions)
        outgoing = changegroup.computeoutgoing(repo, heads, common)
        cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
                                                bundlecaps=bundlecaps,
                                                **getcgkwargs)

    if cg:
        part = bundler.newpart('changegroup', data=cg)
        if version is not None:
            part.addparam('version', version)
        part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)

@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    listkeys = kwargs.get('listkeys', ())
    for namespace in listkeys:
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)

@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if kwargs.get('obsmarkers', False):
        if heads is None:
            heads = repo.heads()
        subset = [c.node() for c in repo.set('::%ln', heads)]
        markers = repo.obsstore.relevantmarkers(subset)
        markers = sorted(markers)
        buildobsmarkerspart(bundler, markers)

@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Don't send unless:
    # - changesets are being exchanged,
    # - the client supports it.
    if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
        return

    outgoing = changegroup.computeoutgoing(repo, heads, common)

    if not outgoing.missingheads:
        return

    cache = tags.hgtagsfnodescache(repo.unfiltered())
    chunks = []

    # .hgtags fnodes are only relevant for head changesets. While we could
    # transfer values for all known nodes, there will likely be little to
    # no benefit.
    #
    # We don't bother using a generator to produce output data because
    # a) we only have 40 bytes per head and even esoteric numbers of heads
    # consume little memory (1M heads is 40MB) b) we don't want to send the
    # part if we don't have entries and knowing if we have entries requires
    # cache lookups.
    for node in outgoing.missingheads:
        # Don't compute missing, as this may slow down serving.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is not None:
            chunks.extend([node, fnode])

    if chunks:
        bundler.newpart('hgtagsfnodes', data=''.join(chunks))

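# Illustrative sketch of the payload built above (hedged): for two missing
# heads the part data is simply the raw 20-byte pairs back to back,
#
#     data = node1 + fnode1 + node2 + fnode2   # 4 x 20 bytes
#
# i.e. 40 bytes per head, which is why the comment above prefers a plain
# list over a generator.
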
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)

def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
                                       False)
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            r = None
            try:
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
        else:
            lockandtr[1] = repo.lock()
            r = cg.apply(repo, source, url)
    finally:
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r

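# Reading aid (descriptive only, no new behavior): gettransaction() above
# acquires wlock -> lock -> transaction in that order, and the finally:
# clause releases them in reverse,
#
#     lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
#     # i.e. close the transaction first, drop the wlock last
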
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote

    if not repo.ui.configbool('experimental', 'clonebundles', False):
        return

    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    res = remote._call('clonebundles')

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(repo, entries)
    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback', False):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disabling clone '
                                 'bundles via '
                                 '"--config experimental.clonebundles=false"'))

def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. Each dict has a ``URL`` key corresponding
    to the URL; the other keys are the attributes for the entry.
    """
    m = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urllib.unquote(key)
            value = urllib.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(repo, value,
                                                            externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                except error.InvalidBundleSpecification:
                    pass
                except error.UnsupportedBundleSpecification:
                    pass

        m.append(attrs)

    return m

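# Example (hedged; URLs and attribute values invented for illustration):
# given a manifest such as
#
#     https://example.com/full.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true
#     https://example.com/other.hg BUNDLESPEC=bzip2-v1
#
# the parser above would return roughly
#
#     [{'URL': 'https://example.com/full.hg', 'BUNDLESPEC': 'gzip-v2',
#       'COMPRESSION': 'gzip', 'VERSION': 'v2', 'REQUIRESNI': 'true'},
#      {'URL': 'https://example.com/other.hg', 'BUNDLESPEC': 'bzip2-v1',
#       'COMPRESSION': 'bzip2', 'VERSION': 'v1'}]
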
def filterclonebundleentries(repo, entries):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    newentries = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                parsebundlespec(repo, spec, strict=True)
            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (entry['URL'], str(e)))
                continue

        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        newentries.append(entry)

    return newentries

def sortclonebundleentries(ui, entries):
    # experimental config: experimental.clonebundleprefers
    prefers = ui.configlist('experimental', 'clonebundleprefers', default=[])
    if not prefers:
        return list(entries)

    prefers = [p.split('=', 1) for p in prefers]

    # Our sort function.
    def compareentry(a, b):
        for prefkey, prefvalue in prefers:
            avalue = a.get(prefkey)
            bvalue = b.get(prefkey)

            # Special case for b missing attribute and a matches exactly.
            if avalue is not None and bvalue is None and avalue == prefvalue:
                return -1

            # Special case for a missing attribute and b matches exactly.
            if bvalue is not None and avalue is None and bvalue == prefvalue:
                return 1

            # We can't compare unless attribute present on both.
            if avalue is None or bvalue is None:
                continue

            # Same values should fall back to next attribute.
            if avalue == bvalue:
                continue

            # Exact matches come first.
            if avalue == prefvalue:
                return -1
            if bvalue == prefvalue:
                return 1

            # Fall back to next attribute.
            continue

        # If we got here we couldn't sort by attributes and prefers. Fall
        # back to index order.
        return 0

    return sorted(entries, cmp=compareentry)

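# Example (hedged; values illustrative): a client that prefers stream
# bundles and then gzip compression could configure
#
#     [experimental]
#     clonebundleprefers = VERSION=packed1, COMPRESSION=gzip
#
# Each item splits on '=' into an (attribute, value) preference; earlier
# preferences win, and entries whose attribute matches a preferred value
# exactly sort before those that do not.
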
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL."""
    lock = repo.lock()
    try:
        tr = repo.transaction('bundleurl')
        try:
            try:
                fh = urlmod.open(ui, url)
                cg = readbundle(ui, fh, 'stream')

                if isinstance(cg, bundle2.unbundle20):
                    bundle2.processbundle(repo, cg, lambda: tr)
                elif isinstance(cg, streamclone.streamcloneapplier):
                    cg.apply(repo)
                else:
                    cg.apply(repo, 'clonebundles', url)
                tr.close()
                return True
            except urllib2.HTTPError as e:
                ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
            except urllib2.URLError as e:
                ui.warn(_('error fetching bundle: %s\n') % e.reason[1])

            return False
        finally:
            tr.release()
    finally:
        lock.release()