##// END OF EJS Templates
Merge with crew-stable
Patrick Mezard -
r5913:7c2921a6 merge default
parent child Browse files
Show More
@@ -1,984 +1,984 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 #
4 #
5 # Configuration options:
5 # Configuration options:
6 #
6 #
7 # convert.svn.trunk
7 # convert.svn.trunk
8 # Relative path to the trunk (default: "trunk")
8 # Relative path to the trunk (default: "trunk")
9 # convert.svn.branches
9 # convert.svn.branches
10 # Relative path to tree of branches (default: "branches")
10 # Relative path to tree of branches (default: "branches")
11 # convert.svn.tags
11 # convert.svn.tags
12 # Relative path to tree of tags (default: "tags")
12 # Relative path to tree of tags (default: "tags")
13 #
13 #
14 # Set these in a hgrc, or on the command line as follows:
14 # Set these in a hgrc, or on the command line as follows:
15 #
15 #
16 # hg convert --config convert.svn.trunk=wackoname [...]
16 # hg convert --config convert.svn.trunk=wackoname [...]
17
17
18 import locale
18 import locale
19 import os
19 import os
20 import re
20 import re
21 import sys
21 import sys
22 import cPickle as pickle
22 import cPickle as pickle
23 import tempfile
23 import tempfile
24
24
25 from mercurial import strutil, util
25 from mercurial import strutil, util
26 from mercurial.i18n import _
26 from mercurial.i18n import _
27
27
28 # Subversion stuff. Works best with very recent Python SVN bindings
28 # Subversion stuff. Works best with very recent Python SVN bindings
29 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
29 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
30 # these bindings.
30 # these bindings.
31
31
32 from cStringIO import StringIO
32 from cStringIO import StringIO
33
33
34 from common import NoRepo, commit, converter_source, encodeargs, decodeargs
34 from common import NoRepo, commit, converter_source, encodeargs, decodeargs
35 from common import commandline, converter_sink, mapfile
35 from common import commandline, converter_sink, mapfile
36
36
37 try:
37 try:
38 from svn.core import SubversionException, Pool
38 from svn.core import SubversionException, Pool
39 import svn
39 import svn
40 import svn.client
40 import svn.client
41 import svn.core
41 import svn.core
42 import svn.ra
42 import svn.ra
43 import svn.delta
43 import svn.delta
44 import transport
44 import transport
45 except ImportError:
45 except ImportError:
46 pass
46 pass
47
47
def geturl(path):
    """Resolve *path* to a repository URL.

    Try the Subversion bindings first (canonicalizes the path and maps a
    working copy to its repository URL); on failure, turn an existing
    local directory into a file:// URL, otherwise return path unchanged.
    """
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            # A drive-letter path needs a leading slash and forward
            # slashes to form a valid file:// URL.
            path = '/' + util.normpath(path)
        return 'file://%s' % path
    return path
59
59
def optrev(number):
    """Return an svn_opt_revision_t pinned to the given revision number."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
65
65
class changedpath(object):
    """Plain-attribute copy of an svn changed-path entry.

    get_log_child() wraps the native svn objects in this class before
    pickling them across the child-process boundary.
    """
    def __init__(self, p):
        self.action = p.action
        self.copyfrom_path = p.copyfrom_path
        self.copyfrom_rev = p.copyfrom_rev
71
71
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    """Run an svn log query in this (child) process and stream it to fp.

    Each log entry is pickled to fp as a 5-tuple
    (orig_paths, revnum, author, date, message).  A trailing None marks
    a clean end of stream; a bare svn error number signals failure for
    the parent (logstream) to re-raise.
    """
    # -1 selects the highest pickle protocol available.
    protocol = -1
    def receiver(orig_paths, revnum, author, date, message, pool):
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                # Replace native svn objects with picklable copies.
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        # Forward the svn error number for the parent to re-raise.
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
98
98
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.

    Entry point of the child process spawned by get_log(): reads the
    encoded query arguments from stdin and pickles log entries to
    stdout (see get_log_child).
    """
    # Pickled data flows over stdin/stdout; keep both binary-safe.
    util.set_binary(sys.stdin)
    util.set_binary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
107
107
class logstream:
    """Interruptible revision log iterator.

    Iterates the pickled entries produced by a 'hg debugsvnlog' child
    process (see get_log / get_log_child).
    """
    def __init__(self, stdout):
        # Pipe connected to the child's stdout.
        self._stdout = stdout

    def __iter__(self):
        while True:
            entry = pickle.load(self._stdout)
            try:
                orig_paths, revnum, author, date, message = entry
            except:
                # Not a 5-tuple: the child sent either None (normal end
                # of stream) or an svn error number to re-raise here.
                if entry is None:
                    break
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        # Closing the pipe may interrupt the child mid-iteration; the
        # child tolerates the resulting IOError (see get_log_child).
        if self._stdout:
            self._stdout.close()
            self._stdout = None
128
128
def get_log(url, paths, start, end, limit=0, discover_changed_paths=True,
            strict_node_history=False):
    """Fetch an svn log via a 'hg debugsvnlog' child process.

    The query arguments are encoded onto the child's stdin and the
    pickled entries are consumed lazily through the returned logstream;
    the child process exists to avoid memory collection issues (see
    debugsvnlog).
    """
    args = [url, paths, start, end, limit, discover_changed_paths,
            strict_node_history]
    arg = encodeargs(args)
    hgexe = util.hgexecutable()
    cmd = '%s debugsvnlog' % util.shellquote(hgexe)
    # 'b': binary-mode pipes for the pickled stream.
    stdin, stdout = os.popen2(cmd, 'b')
    stdin.write(arg)
    stdin.close()
    return logstream(stdout)
140
140
141 # SVN conversion code stolen from bzr-svn and tailor
141 # SVN conversion code stolen from bzr-svn and tailor
142 #
142 #
143 # Subversion looks like a versioned filesystem, branches structures
143 # Subversion looks like a versioned filesystem, branches structures
144 # are defined by conventions and not enforced by the tool. First,
144 # are defined by conventions and not enforced by the tool. First,
145 # we define the potential branches (modules) as "trunk" and "branches"
145 # we define the potential branches (modules) as "trunk" and "branches"
146 # children directories. Revisions are then identified by their
146 # children directories. Revisions are then identified by their
147 # module and revision number (and a repository identifier).
147 # module and revision number (and a repository identifier).
148 #
148 #
149 # The revision graph is really a tree (or a forest). By default, a
149 # The revision graph is really a tree (or a forest). By default, a
150 # revision parent is the previous revision in the same module. If the
150 # revision parent is the previous revision in the same module. If the
151 # module directory is copied/moved from another module then the
151 # module directory is copied/moved from another module then the
152 # revision is the module root and its parent the source revision in
152 # revision is the module root and its parent the source revision in
153 # the parent module. A revision has at most one parent.
153 # the parent module. A revision has at most one parent.
154 #
154 #
155 class svn_source(converter_source):
155 class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        """Open *url* as a Subversion conversion source.

        url may carry a trailing "@N" revision suffix (useful to reach
        deleted branches); rev, when given, must be an integer revision
        and overrides the suffix.  Raises NoRepo when the svn bindings
        are missing or url is not a Subversion repository, and
        util.Abort when rev is not an integer.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        try:
            SubversionException
        except NameError:
            # The module-level 'import svn' is wrapped in try/except; if
            # it failed, SubversionException is simply unbound.
            raise NoRepo('Subversion python bindings could not be loaded')

        self.encoding = locale.getpreferredencoding()
        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at+1:])
                url = url[:at]
        except ValueError, e:
            # '@' not followed by an integer: treat url as-is.
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.base = svn.ra.get_repos_root(self.ra)
            # module is the repository-relative path of url, e.g. "/trunk".
            self.module = self.url[len(self.base):]
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
        except SubversionException, e:
            ui.print_exc()
            raise NoRepo("%s does not look like a Subversion repo" % self.url)

        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort('svn: revision %s is not an integer' % rev)

        try:
            self.get_blacklist()
        except IOError, e:
            # No blacklist.txt available: nothing to skip.
            pass

        self.last_changed = self.latest(self.module, latest)

        self.head = self.revid(self.last_changed)
        self._changescache = None

        # Remember the working copy path, if url is one, so converted()
        # can record the sha map next to it.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
213
213
214 def setrevmap(self, revmap):
214 def setrevmap(self, revmap):
215 lastrevs = {}
215 lastrevs = {}
216 for revid in revmap.iterkeys():
216 for revid in revmap.iterkeys():
217 uuid, module, revnum = self.revsplit(revid)
217 uuid, module, revnum = self.revsplit(revid)
218 lastrevnum = lastrevs.setdefault(module, revnum)
218 lastrevnum = lastrevs.setdefault(module, revnum)
219 if revnum > lastrevnum:
219 if revnum > lastrevnum:
220 lastrevs[module] = revnum
220 lastrevs[module] = revnum
221 self.lastrevs = lastrevs
221 self.lastrevs = lastrevs
222
222
    def exists(self, path, optrev):
        """Return True if path exists in the repository at optrev."""
        try:
            # ls raises on a missing path; a successful call is enough.
            svn.client.ls(self.url.rstrip('/') + '/' + path,
                          optrev, False, self.ctx)
            return True
        except SubversionException, err:
            return False
230
230
    def getheads(self):
        """Return the list of revision ids to convert from.

        The module head comes first; one extra head is appended for
        each branch found under the configured branches directory.
        Also sets self.tags to the repository path of the tags tree.
        """

        def getcfgpath(name, rev):
            # Resolve convert.svn.<name>, falling back to the
            # conventional layout; abort if an explicitly configured
            # path does not exist in the repository at rev.
            cfgpath = self.ui.config('convert', 'svn.' + name)
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if cfgpath:
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                 % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            lt = self.latest(self.module, self.last_changed)
            self.head = self.revid(lt)

        # First head in the list is the module's head
        self.heads = [self.head]
        self.tags = '%s/%s' % (oldmodule , (tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
                                        self.ctx)
            for branch in branchnames.keys():
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                brevnum = self.latest(module, self.last_changed)
                brev = self.revid(brevnum, module)
                self.ui.note('found branch %s at %d\n' % (branch, brevnum))
                self.heads.append(brev)

        return self.heads
275
275
276 def getfile(self, file, rev):
276 def getfile(self, file, rev):
277 data, mode = self._getfile(file, rev)
277 data, mode = self._getfile(file, rev)
278 self.modecache[(file, rev)] = mode
278 self.modecache[(file, rev)] = mode
279 return data
279 return data
280
280
281 def getmode(self, file, rev):
281 def getmode(self, file, rev):
282 return self.modecache[(file, rev)]
282 return self.modecache[(file, rev)]
283
283
    def getchanges(self, rev):
        """Return ([(file, rev), ...], {dest: source}) for revision rev.

        A result precomputed by getchangedfiles() is returned directly;
        otherwise per-file modes are stashed in self.modecache for
        getmode() and the path data for rev is discarded afterwards.
        """
        if self._changescache and self._changescache[0] == rev:
            return self._changescache[1]
        self._changescache = None
        self.modecache = {}
        (paths, parents) = self.paths[rev]
        files, copies = self.expandpaths(rev, paths, parents)
        files.sort()
        files = zip(files, [rev] * len(files))

        # caller caches the result, so free it here to release memory
        del self.paths[rev]
        return (files, copies)
297
297
298 def getchangedfiles(self, rev, i):
298 def getchangedfiles(self, rev, i):
299 changes = self.getchanges(rev)
299 changes = self.getchanges(rev)
300 self._changescache = (rev, changes)
300 self._changescache = (rev, changes)
301 return [f[0] for f in changes[0]]
301 return [f[0] for f in changes[0]]
302
302
    def getcommit(self, rev):
        """Return (and forget) the commit object for revision id rev,
        fetching and caching a range of revisions when needed.
        """
        if rev not in self.commits:
            uuid, module, revnum = self.revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            # revision graph backward traversal. Cache all of them
            # down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            # isolated branches parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
        commit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return commit
322
322
    def gettags(self):
        """Map tag names to the revision id they were copied from.

        Scans the log of the tags tree from revision 0 up to the head;
        each path created under it is read as a copy (copyfrom_path /
        copyfrom_rev), which is how svn tags are made.
        """
        tags = {}
        start = self.revnum(self.head)
        try:
            for entry in get_log(self.url, [self.tags], 0, start):
                orig_paths, revnum, author, date, message = entry
                for path in orig_paths:
                    if not path.startswith(self.tags+'/'):
                        continue
                    ent = orig_paths[path]
                    source = ent.copyfrom_path
                    rev = ent.copyfrom_rev
                    tag = path.split('/')[-1]
                    tags[tag] = self.revid(rev, module=source)
        except SubversionException, (inst, num):
            self.ui.note('no tags found at revision %d\n' % start)
        return tags
340
340
341 def converted(self, rev, destrev):
341 def converted(self, rev, destrev):
342 if not self.wc:
342 if not self.wc:
343 return
343 return
344 if self.convertfp is None:
344 if self.convertfp is None:
345 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
345 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
346 'a')
346 'a')
347 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
347 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
348 self.convertfp.flush()
348 self.convertfp.flush()
349
349
350 # -- helper functions --
350 # -- helper functions --
351
351
352 def revid(self, revnum, module=None):
352 def revid(self, revnum, module=None):
353 if not module:
353 if not module:
354 module = self.module
354 module = self.module
355 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
355 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
356 revnum)
356 revnum)
357
357
358 def revnum(self, rev):
358 def revnum(self, rev):
359 return int(rev.split('@')[-1])
359 return int(rev.split('@')[-1])
360
360
361 def revsplit(self, rev):
361 def revsplit(self, rev):
362 url, revnum = rev.encode(self.encoding).split('@', 1)
362 url, revnum = rev.encode(self.encoding).split('@', 1)
363 revnum = int(revnum)
363 revnum = int(revnum)
364 parts = url.split('/', 1)
364 parts = url.split('/', 1)
365 uuid = parts.pop(0)[4:]
365 uuid = parts.pop(0)[4:]
366 mod = ''
366 mod = ''
367 if parts:
367 if parts:
368 mod = '/' + parts[0]
368 mod = '/' + parts[0]
369 return uuid, mod, revnum
369 return uuid, mod, revnum
370
370
    def latest(self, path, stop=0):
        'find the latest revision affecting path, up to stop'
        # stop == 0 means the repository head.
        if not stop:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            # stat() is relative to the ra session root; point it at the
            # repository root for the duration of the call.
            self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(self.module)
        except SubversionException:
            dirent = None
        if not dirent:
            raise util.Abort('%s not found up to revision %d' % (path, stop))

        return dirent.created_rev
385
385
386 def get_blacklist(self):
386 def get_blacklist(self):
387 """Avoid certain revision numbers.
387 """Avoid certain revision numbers.
388 It is not uncommon for two nearby revisions to cancel each other
388 It is not uncommon for two nearby revisions to cancel each other
389 out, e.g. 'I copied trunk into a subdirectory of itself instead
389 out, e.g. 'I copied trunk into a subdirectory of itself instead
390 of making a branch'. The converted repository is significantly
390 of making a branch'. The converted repository is significantly
391 smaller if we ignore such revisions."""
391 smaller if we ignore such revisions."""
392 self.blacklist = util.set()
392 self.blacklist = util.set()
393 blacklist = self.blacklist
393 blacklist = self.blacklist
394 for line in file("blacklist.txt", "r"):
394 for line in file("blacklist.txt", "r"):
395 if not line.startswith("#"):
395 if not line.startswith("#"):
396 try:
396 try:
397 svn_rev = int(line.strip())
397 svn_rev = int(line.strip())
398 blacklist.add(svn_rev)
398 blacklist.add(svn_rev)
399 except ValueError, e:
399 except ValueError, e:
400 pass # not an integer or a comment
400 pass # not an integer or a comment
401
401
402 def is_blacklisted(self, svn_rev):
402 def is_blacklisted(self, svn_rev):
403 return svn_rev in self.blacklist
403 return svn_rev in self.blacklist
404
404
    def reparent(self, module):
        """Switch the ra session root to base + module."""
        svn_url = self.base + module
        self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
        svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
409
409
410 def expandpaths(self, rev, paths, parents):
410 def expandpaths(self, rev, paths, parents):
411 def get_entry_from_path(path, module=self.module):
411 def get_entry_from_path(path, module=self.module):
412 # Given the repository url of this wc, say
412 # Given the repository url of this wc, say
413 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
413 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
414 # extract the "entry" portion (a relative path) from what
414 # extract the "entry" portion (a relative path) from what
415 # svn log --xml says, ie
415 # svn log --xml says, ie
416 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
416 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
417 # that is to say "tests/PloneTestCase.py"
417 # that is to say "tests/PloneTestCase.py"
418 if path.startswith(module):
418 if path.startswith(module):
419 relative = path[len(module):]
419 relative = path[len(module):]
420 if relative.startswith('/'):
420 if relative.startswith('/'):
421 return relative[1:]
421 return relative[1:]
422 else:
422 else:
423 return relative
423 return relative
424
424
425 # The path is outside our tracked tree...
425 # The path is outside our tracked tree...
426 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
426 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
427 return None
427 return None
428
428
429 entries = []
429 entries = []
430 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
430 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
431 copies = {}
431 copies = {}
432
432
433 new_module, revnum = self.revsplit(rev)[1:]
433 new_module, revnum = self.revsplit(rev)[1:]
434 if new_module != self.module:
434 if new_module != self.module:
435 self.module = new_module
435 self.module = new_module
436 self.reparent(self.module)
436 self.reparent(self.module)
437
437
438 for path, ent in paths:
438 for path, ent in paths:
439 entrypath = get_entry_from_path(path, module=self.module)
439 entrypath = get_entry_from_path(path, module=self.module)
440 entry = entrypath.decode(self.encoding)
440 entry = entrypath.decode(self.encoding)
441
441
442 kind = svn.ra.check_path(self.ra, entrypath, revnum)
442 kind = svn.ra.check_path(self.ra, entrypath, revnum)
443 if kind == svn.core.svn_node_file:
443 if kind == svn.core.svn_node_file:
444 if ent.copyfrom_path:
444 if ent.copyfrom_path:
445 copyfrom_path = get_entry_from_path(ent.copyfrom_path)
445 copyfrom_path = get_entry_from_path(ent.copyfrom_path)
446 if copyfrom_path:
446 if copyfrom_path:
447 self.ui.debug("Copied to %s from %s@%s\n" %
447 self.ui.debug("Copied to %s from %s@%s\n" %
448 (entrypath, copyfrom_path,
448 (entrypath, copyfrom_path,
449 ent.copyfrom_rev))
449 ent.copyfrom_rev))
450 # It's probably important for hg that the source
450 # It's probably important for hg that the source
451 # exists in the revision's parent, not just the
451 # exists in the revision's parent, not just the
452 # ent.copyfrom_rev
452 # ent.copyfrom_rev
453 fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
453 fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
454 if fromkind != 0:
454 if fromkind != 0:
455 copies[self.recode(entry)] = self.recode(copyfrom_path)
455 copies[self.recode(entry)] = self.recode(copyfrom_path)
456 entries.append(self.recode(entry))
456 entries.append(self.recode(entry))
457 elif kind == 0: # gone, but had better be a deleted *file*
457 elif kind == 0: # gone, but had better be a deleted *file*
458 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
458 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
459
459
460 # if a branch is created but entries are removed in the same
460 # if a branch is created but entries are removed in the same
461 # changeset, get the right fromrev
461 # changeset, get the right fromrev
462 # parents cannot be empty here, you cannot remove things from
462 # parents cannot be empty here, you cannot remove things from
463 # a root revision.
463 # a root revision.
464 uuid, old_module, fromrev = self.revsplit(parents[0])
464 uuid, old_module, fromrev = self.revsplit(parents[0])
465
465
466 basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
466 basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
467 entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
467 entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
468
468
469 def lookup_parts(p):
469 def lookup_parts(p):
470 rc = None
470 rc = None
471 parts = p.split("/")
471 parts = p.split("/")
472 for i in range(len(parts)):
472 for i in range(len(parts)):
473 part = "/".join(parts[:i])
473 part = "/".join(parts[:i])
474 info = part, copyfrom.get(part, None)
474 info = part, copyfrom.get(part, None)
475 if info[1] is not None:
475 if info[1] is not None:
476 self.ui.debug("Found parent directory %s\n" % info[1])
476 self.ui.debug("Found parent directory %s\n" % info[1])
477 rc = info
477 rc = info
478 return rc
478 return rc
479
479
480 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
480 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
481
481
482 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
482 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
483
483
484 # need to remove fragment from lookup_parts and replace with copyfrom_path
484 # need to remove fragment from lookup_parts and replace with copyfrom_path
485 if frompath is not None:
485 if frompath is not None:
486 self.ui.debug("munge-o-matic\n")
486 self.ui.debug("munge-o-matic\n")
487 self.ui.debug(entrypath + '\n')
487 self.ui.debug(entrypath + '\n')
488 self.ui.debug(entrypath[len(frompath):] + '\n')
488 self.ui.debug(entrypath[len(frompath):] + '\n')
489 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
489 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
490 fromrev = froment.copyfrom_rev
490 fromrev = froment.copyfrom_rev
491 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
491 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
492
492
493 # We can avoid the reparent calls if the module has not changed
493 # We can avoid the reparent calls if the module has not changed
494 # but it probably does not worth the pain.
494 # but it probably does not worth the pain.
495 self.reparent('')
495 self.reparent('')
496 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
496 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
497 self.reparent(self.module)
497 self.reparent(self.module)
498
498
499 if fromkind == svn.core.svn_node_file: # a deleted file
499 if fromkind == svn.core.svn_node_file: # a deleted file
500 entries.append(self.recode(entry))
500 entries.append(self.recode(entry))
501 elif fromkind == svn.core.svn_node_dir:
501 elif fromkind == svn.core.svn_node_dir:
502 # print "Deleted/moved non-file:", revnum, path, ent
502 # print "Deleted/moved non-file:", revnum, path, ent
503 # children = self._find_children(path, revnum - 1)
503 # children = self._find_children(path, revnum - 1)
504 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
504 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
505 # Sometimes this is tricky. For example: in
505 # Sometimes this is tricky. For example: in
506 # The Subversion Repository revision 6940 a dir
506 # The Subversion Repository revision 6940 a dir
507 # was copied and one of its files was deleted
507 # was copied and one of its files was deleted
508 # from the new location in the same commit. This
508 # from the new location in the same commit. This
509 # code can't deal with that yet.
509 # code can't deal with that yet.
510 if ent.action == 'C':
510 if ent.action == 'C':
511 children = self._find_children(path, fromrev)
511 children = self._find_children(path, fromrev)
512 else:
512 else:
513 oroot = entrypath.strip('/')
513 oroot = entrypath.strip('/')
514 nroot = path.strip('/')
514 nroot = path.strip('/')
515 children = self._find_children(oroot, fromrev)
515 children = self._find_children(oroot, fromrev)
516 children = [s.replace(oroot,nroot) for s in children]
516 children = [s.replace(oroot,nroot) for s in children]
517 # Mark all [files, not directories] as deleted.
517 # Mark all [files, not directories] as deleted.
518 for child in children:
518 for child in children:
519 # Can we move a child directory and its
519 # Can we move a child directory and its
520 # parent in the same commit? (probably can). Could
520 # parent in the same commit? (probably can). Could
521 # cause problems if instead of revnum -1,
521 # cause problems if instead of revnum -1,
522 # we have to look in (copyfrom_path, revnum - 1)
522 # we have to look in (copyfrom_path, revnum - 1)
523 entrypath = get_entry_from_path("/" + child, module=old_module)
523 entrypath = get_entry_from_path("/" + child, module=old_module)
524 if entrypath:
524 if entrypath:
525 entry = self.recode(entrypath.decode(self.encoding))
525 entry = self.recode(entrypath.decode(self.encoding))
526 if entry in copies:
526 if entry in copies:
527 # deleted file within a copy
527 # deleted file within a copy
528 del copies[entry]
528 del copies[entry]
529 else:
529 else:
530 entries.append(entry)
530 entries.append(entry)
531 else:
531 else:
532 self.ui.debug('unknown path in revision %d: %s\n' % \
532 self.ui.debug('unknown path in revision %d: %s\n' % \
533 (revnum, path))
533 (revnum, path))
534 elif kind == svn.core.svn_node_dir:
534 elif kind == svn.core.svn_node_dir:
535 # Should probably synthesize normal file entries
535 # Should probably synthesize normal file entries
536 # and handle as above to clean up copy/rename handling.
536 # and handle as above to clean up copy/rename handling.
537
537
538 # If the directory just had a prop change,
538 # If the directory just had a prop change,
539 # then we shouldn't need to look for its children.
539 # then we shouldn't need to look for its children.
540 if ent.action == 'M':
540 if ent.action == 'M':
541 continue
541 continue
542
542
543 # Also this could create duplicate entries. Not sure
543 # Also this could create duplicate entries. Not sure
544 # whether this will matter. Maybe should make entries a set.
544 # whether this will matter. Maybe should make entries a set.
545 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
545 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
546 # This will fail if a directory was copied
546 # This will fail if a directory was copied
547 # from another branch and then some of its files
547 # from another branch and then some of its files
548 # were deleted in the same transaction.
548 # were deleted in the same transaction.
549 children = self._find_children(path, revnum)
549 children = self._find_children(path, revnum)
550 children.sort()
550 children.sort()
551 for child in children:
551 for child in children:
552 # Can we move a child directory and its
552 # Can we move a child directory and its
553 # parent in the same commit? (probably can). Could
553 # parent in the same commit? (probably can). Could
554 # cause problems if instead of revnum -1,
554 # cause problems if instead of revnum -1,
555 # we have to look in (copyfrom_path, revnum - 1)
555 # we have to look in (copyfrom_path, revnum - 1)
556 entrypath = get_entry_from_path("/" + child, module=self.module)
556 entrypath = get_entry_from_path("/" + child, module=self.module)
557 # print child, self.module, entrypath
557 # print child, self.module, entrypath
558 if entrypath:
558 if entrypath:
559 # Need to filter out directories here...
559 # Need to filter out directories here...
560 kind = svn.ra.check_path(self.ra, entrypath, revnum)
560 kind = svn.ra.check_path(self.ra, entrypath, revnum)
561 if kind != svn.core.svn_node_dir:
561 if kind != svn.core.svn_node_dir:
562 entries.append(self.recode(entrypath))
562 entries.append(self.recode(entrypath))
563
563
564 # Copies here (must copy all from source)
564 # Copies here (must copy all from source)
565 # Probably not a real problem for us if
565 # Probably not a real problem for us if
566 # source does not exist
566 # source does not exist
567
567
568 # Can do this with the copy command "hg copy"
568 # Can do this with the copy command "hg copy"
569 # if ent.copyfrom_path:
569 # if ent.copyfrom_path:
570 # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
570 # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
571 # module=self.module)
571 # module=self.module)
572 # copyto_entry = entrypath
572 # copyto_entry = entrypath
573 #
573 #
574 # print "copy directory", copyfrom_entry, 'to', copyto_entry
574 # print "copy directory", copyfrom_entry, 'to', copyto_entry
575 #
575 #
576 # copies.append((copyfrom_entry, copyto_entry))
576 # copies.append((copyfrom_entry, copyto_entry))
577
577
578 if ent.copyfrom_path:
578 if ent.copyfrom_path:
579 copyfrom_path = ent.copyfrom_path.decode(self.encoding)
579 copyfrom_path = ent.copyfrom_path.decode(self.encoding)
580 copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
580 copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
581 if copyfrom_entry:
581 if copyfrom_entry:
582 copyfrom[path] = ent
582 copyfrom[path] = ent
583 self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
583 self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
584
584
585 # Good, /probably/ a regular copy. Really should check
585 # Good, /probably/ a regular copy. Really should check
586 # to see whether the parent revision actually contains
586 # to see whether the parent revision actually contains
587 # the directory in question.
587 # the directory in question.
588 children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
588 children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
589 children.sort()
589 children.sort()
590 for child in children:
590 for child in children:
591 entrypath = get_entry_from_path("/" + child, module=self.module)
591 entrypath = get_entry_from_path("/" + child, module=self.module)
592 if entrypath:
592 if entrypath:
593 entry = entrypath.decode(self.encoding)
593 entry = entrypath.decode(self.encoding)
594 # print "COPY COPY From", copyfrom_entry, entry
594 # print "COPY COPY From", copyfrom_entry, entry
595 copyto_path = path + entry[len(copyfrom_entry):]
595 copyto_path = path + entry[len(copyfrom_entry):]
596 copyto_entry = get_entry_from_path(copyto_path, module=self.module)
596 copyto_entry = get_entry_from_path(copyto_path, module=self.module)
597 # print "COPY", entry, "COPY To", copyto_entry
597 # print "COPY", entry, "COPY To", copyto_entry
598 copies[self.recode(copyto_entry)] = self.recode(entry)
598 copies[self.recode(copyto_entry)] = self.recode(entry)
599 # copy from quux splort/quuxfile
599 # copy from quux splort/quuxfile
600
600
601 return (entries, copies)
601 return (util.unique(entries), copies)
602
602
603 def _fetch_revisions(self, from_revnum, to_revnum):
603 def _fetch_revisions(self, from_revnum, to_revnum):
604 if from_revnum < to_revnum:
604 if from_revnum < to_revnum:
605 from_revnum, to_revnum = to_revnum, from_revnum
605 from_revnum, to_revnum = to_revnum, from_revnum
606
606
607 self.child_cset = None
607 self.child_cset = None
608 def parselogentry(orig_paths, revnum, author, date, message):
608 def parselogentry(orig_paths, revnum, author, date, message):
609 """Return the parsed commit object or None, and True if
609 """Return the parsed commit object or None, and True if
610 the revision is a branch root.
610 the revision is a branch root.
611 """
611 """
612 self.ui.debug("parsing revision %d (%d changes)\n" %
612 self.ui.debug("parsing revision %d (%d changes)\n" %
613 (revnum, len(orig_paths)))
613 (revnum, len(orig_paths)))
614
614
615 rev = self.revid(revnum)
615 rev = self.revid(revnum)
616 # branch log might return entries for a parent we already have
616 # branch log might return entries for a parent we already have
617
617
618 if (rev in self.commits or revnum < to_revnum):
618 if (rev in self.commits or revnum < to_revnum):
619 return None, False
619 return None, False
620
620
621 parents = []
621 parents = []
622 # check whether this revision is the start of a branch
622 # check whether this revision is the start of a branch
623 if self.module in orig_paths:
623 if self.module in orig_paths:
624 ent = orig_paths[self.module]
624 ent = orig_paths[self.module]
625 if ent.copyfrom_path:
625 if ent.copyfrom_path:
626 # ent.copyfrom_rev may not be the actual last revision
626 # ent.copyfrom_rev may not be the actual last revision
627 prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
627 prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
628 parents = [self.revid(prev, ent.copyfrom_path)]
628 parents = [self.revid(prev, ent.copyfrom_path)]
629 self.ui.note('found parent of branch %s at %d: %s\n' % \
629 self.ui.note('found parent of branch %s at %d: %s\n' % \
630 (self.module, prev, ent.copyfrom_path))
630 (self.module, prev, ent.copyfrom_path))
631 else:
631 else:
632 self.ui.debug("No copyfrom path, don't know what to do.\n")
632 self.ui.debug("No copyfrom path, don't know what to do.\n")
633
633
634 orig_paths = orig_paths.items()
634 orig_paths = orig_paths.items()
635 orig_paths.sort()
635 orig_paths.sort()
636 paths = []
636 paths = []
637 # filter out unrelated paths
637 # filter out unrelated paths
638 for path, ent in orig_paths:
638 for path, ent in orig_paths:
639 if not path.startswith(self.module):
639 if not path.startswith(self.module):
640 self.ui.debug("boring@%s: %s\n" % (revnum, path))
640 self.ui.debug("boring@%s: %s\n" % (revnum, path))
641 continue
641 continue
642 paths.append((path, ent))
642 paths.append((path, ent))
643
643
644 # Example SVN datetime. Includes microseconds.
644 # Example SVN datetime. Includes microseconds.
645 # ISO-8601 conformant
645 # ISO-8601 conformant
646 # '2007-01-04T17:35:00.902377Z'
646 # '2007-01-04T17:35:00.902377Z'
647 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
647 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
648
648
649 log = message and self.recode(message)
649 log = message and self.recode(message)
650 author = author and self.recode(author) or ''
650 author = author and self.recode(author) or ''
651 try:
651 try:
652 branch = self.module.split("/")[-1]
652 branch = self.module.split("/")[-1]
653 if branch == 'trunk':
653 if branch == 'trunk':
654 branch = ''
654 branch = ''
655 except IndexError:
655 except IndexError:
656 branch = None
656 branch = None
657
657
658 cset = commit(author=author,
658 cset = commit(author=author,
659 date=util.datestr(date),
659 date=util.datestr(date),
660 desc=log,
660 desc=log,
661 parents=parents,
661 parents=parents,
662 branch=branch,
662 branch=branch,
663 rev=rev.encode('utf-8'))
663 rev=rev.encode('utf-8'))
664
664
665 self.commits[rev] = cset
665 self.commits[rev] = cset
666 # The parents list is *shared* among self.paths and the
666 # The parents list is *shared* among self.paths and the
667 # commit object. Both will be updated below.
667 # commit object. Both will be updated below.
668 self.paths[rev] = (paths, cset.parents)
668 self.paths[rev] = (paths, cset.parents)
669 if self.child_cset and not self.child_cset.parents:
669 if self.child_cset and not self.child_cset.parents:
670 self.child_cset.parents[:] = [rev]
670 self.child_cset.parents[:] = [rev]
671 self.child_cset = cset
671 self.child_cset = cset
672 return cset, len(parents) > 0
672 return cset, len(parents) > 0
673
673
674 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
674 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
675 (self.module, from_revnum, to_revnum))
675 (self.module, from_revnum, to_revnum))
676
676
677 try:
677 try:
678 firstcset = None
678 firstcset = None
679 stream = get_log(self.url, [self.module], from_revnum, to_revnum)
679 stream = get_log(self.url, [self.module], from_revnum, to_revnum)
680 try:
680 try:
681 for entry in stream:
681 for entry in stream:
682 paths, revnum, author, date, message = entry
682 paths, revnum, author, date, message = entry
683 if self.is_blacklisted(revnum):
683 if self.is_blacklisted(revnum):
684 self.ui.note('skipping blacklisted revision %d\n'
684 self.ui.note('skipping blacklisted revision %d\n'
685 % revnum)
685 % revnum)
686 continue
686 continue
687 if paths is None:
687 if paths is None:
688 self.ui.debug('revision %d has no entries\n' % revnum)
688 self.ui.debug('revision %d has no entries\n' % revnum)
689 continue
689 continue
690 cset, branched = parselogentry(paths, revnum, author,
690 cset, branched = parselogentry(paths, revnum, author,
691 date, message)
691 date, message)
692 if cset:
692 if cset:
693 firstcset = cset
693 firstcset = cset
694 if branched:
694 if branched:
695 break
695 break
696 finally:
696 finally:
697 stream.close()
697 stream.close()
698
698
699 if firstcset and not firstcset.parents:
699 if firstcset and not firstcset.parents:
700 # The first revision of the sequence (the last fetched one)
700 # The first revision of the sequence (the last fetched one)
701 # has invalid parents if not a branch root. Find the parent
701 # has invalid parents if not a branch root. Find the parent
702 # revision now, if any.
702 # revision now, if any.
703 try:
703 try:
704 firstrevnum = self.revnum(firstcset.rev)
704 firstrevnum = self.revnum(firstcset.rev)
705 if firstrevnum > 1:
705 if firstrevnum > 1:
706 latest = self.latest(self.module, firstrevnum - 1)
706 latest = self.latest(self.module, firstrevnum - 1)
707 firstcset.parents.append(self.revid(latest))
707 firstcset.parents.append(self.revid(latest))
708 except util.Abort:
708 except util.Abort:
709 pass
709 pass
710 except SubversionException, (inst, num):
710 except SubversionException, (inst, num):
711 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
711 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
712 raise NoSuchRevision(branch=self,
712 raise NoSuchRevision(branch=self,
713 revision="Revision number %d" % to_revnum)
713 revision="Revision number %d" % to_revnum)
714 raise
714 raise
715
715
716 def _getfile(self, file, rev):
716 def _getfile(self, file, rev):
717 io = StringIO()
717 io = StringIO()
718 # TODO: ra.get_file transmits the whole file instead of diffs.
718 # TODO: ra.get_file transmits the whole file instead of diffs.
719 mode = ''
719 mode = ''
720 try:
720 try:
721 new_module, revnum = self.revsplit(rev)[1:]
721 new_module, revnum = self.revsplit(rev)[1:]
722 if self.module != new_module:
722 if self.module != new_module:
723 self.module = new_module
723 self.module = new_module
724 self.reparent(self.module)
724 self.reparent(self.module)
725 info = svn.ra.get_file(self.ra, file, revnum, io)
725 info = svn.ra.get_file(self.ra, file, revnum, io)
726 if isinstance(info, list):
726 if isinstance(info, list):
727 info = info[-1]
727 info = info[-1]
728 mode = ("svn:executable" in info) and 'x' or ''
728 mode = ("svn:executable" in info) and 'x' or ''
729 mode = ("svn:special" in info) and 'l' or mode
729 mode = ("svn:special" in info) and 'l' or mode
730 except SubversionException, e:
730 except SubversionException, e:
731 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
731 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
732 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
732 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
733 if e.apr_err in notfound: # File not found
733 if e.apr_err in notfound: # File not found
734 raise IOError()
734 raise IOError()
735 raise
735 raise
736 data = io.getvalue()
736 data = io.getvalue()
737 if mode == 'l':
737 if mode == 'l':
738 link_prefix = "link "
738 link_prefix = "link "
739 if data.startswith(link_prefix):
739 if data.startswith(link_prefix):
740 data = data[len(link_prefix):]
740 data = data[len(link_prefix):]
741 return data, mode
741 return data, mode
742
742
743 def _find_children(self, path, revnum):
743 def _find_children(self, path, revnum):
744 path = path.strip('/')
744 path = path.strip('/')
745 pool = Pool()
745 pool = Pool()
746 rpath = '/'.join([self.base, path]).strip('/')
746 rpath = '/'.join([self.base, path]).strip('/')
747 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
747 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
748
748
749 pre_revprop_change = '''#!/bin/sh
749 pre_revprop_change = '''#!/bin/sh
750
750
751 REPOS="$1"
751 REPOS="$1"
752 REV="$2"
752 REV="$2"
753 USER="$3"
753 USER="$3"
754 PROPNAME="$4"
754 PROPNAME="$4"
755 ACTION="$5"
755 ACTION="$5"
756
756
757 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
757 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
758 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
758 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
759 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
759 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
760
760
761 echo "Changing prohibited revision property" >&2
761 echo "Changing prohibited revision property" >&2
762 exit 1
762 exit 1
763 '''
763 '''
764
764
765 class svn_sink(converter_sink, commandline):
765 class svn_sink(converter_sink, commandline):
766 commit_re = re.compile(r'Committed revision (\d+).', re.M)
766 commit_re = re.compile(r'Committed revision (\d+).', re.M)
767
767
768 def prerun(self):
768 def prerun(self):
769 if self.wc:
769 if self.wc:
770 os.chdir(self.wc)
770 os.chdir(self.wc)
771
771
772 def postrun(self):
772 def postrun(self):
773 if self.wc:
773 if self.wc:
774 os.chdir(self.cwd)
774 os.chdir(self.cwd)
775
775
776 def join(self, name):
776 def join(self, name):
777 return os.path.join(self.wc, '.svn', name)
777 return os.path.join(self.wc, '.svn', name)
778
778
779 def revmapfile(self):
779 def revmapfile(self):
780 return self.join('hg-shamap')
780 return self.join('hg-shamap')
781
781
782 def authorfile(self):
782 def authorfile(self):
783 return self.join('hg-authormap')
783 return self.join('hg-authormap')
784
784
785 def __init__(self, ui, path):
785 def __init__(self, ui, path):
786 converter_sink.__init__(self, ui, path)
786 converter_sink.__init__(self, ui, path)
787 commandline.__init__(self, ui, 'svn')
787 commandline.__init__(self, ui, 'svn')
788 self.delete = []
788 self.delete = []
789 self.setexec = []
789 self.setexec = []
790 self.delexec = []
790 self.delexec = []
791 self.copies = []
791 self.copies = []
792 self.wc = None
792 self.wc = None
793 self.cwd = os.getcwd()
793 self.cwd = os.getcwd()
794
794
795 path = os.path.realpath(path)
795 path = os.path.realpath(path)
796
796
797 created = False
797 created = False
798 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
798 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
799 self.wc = path
799 self.wc = path
800 self.run0('update')
800 self.run0('update')
801 else:
801 else:
802 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
802 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
803
803
804 if os.path.isdir(os.path.dirname(path)):
804 if os.path.isdir(os.path.dirname(path)):
805 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
805 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
806 ui.status(_('initializing svn repo %r\n') %
806 ui.status(_('initializing svn repo %r\n') %
807 os.path.basename(path))
807 os.path.basename(path))
808 commandline(ui, 'svnadmin').run0('create', path)
808 commandline(ui, 'svnadmin').run0('create', path)
809 created = path
809 created = path
810 path = util.normpath(path)
810 path = util.normpath(path)
811 if not path.startswith('/'):
811 if not path.startswith('/'):
812 path = '/' + path
812 path = '/' + path
813 path = 'file://' + path
813 path = 'file://' + path
814
814
815 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
815 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
816 self.run0('checkout', path, wcpath)
816 self.run0('checkout', path, wcpath)
817
817
818 self.wc = wcpath
818 self.wc = wcpath
819 self.opener = util.opener(self.wc)
819 self.opener = util.opener(self.wc)
820 self.wopener = util.opener(self.wc)
820 self.wopener = util.opener(self.wc)
821 self.childmap = mapfile(ui, self.join('hg-childmap'))
821 self.childmap = mapfile(ui, self.join('hg-childmap'))
822 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
822 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
823
823
824 if created:
824 if created:
825 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
825 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
826 fp = open(hook, 'w')
826 fp = open(hook, 'w')
827 fp.write(pre_revprop_change)
827 fp.write(pre_revprop_change)
828 fp.close()
828 fp.close()
829 util.set_flags(hook, "x")
829 util.set_flags(hook, "x")
830
830
831 xport = transport.SvnRaTransport(url=geturl(path))
831 xport = transport.SvnRaTransport(url=geturl(path))
832 self.uuid = svn.ra.get_uuid(xport.ra)
832 self.uuid = svn.ra.get_uuid(xport.ra)
833
833
834 def wjoin(self, *names):
834 def wjoin(self, *names):
835 return os.path.join(self.wc, *names)
835 return os.path.join(self.wc, *names)
836
836
837 def putfile(self, filename, flags, data):
837 def putfile(self, filename, flags, data):
838 if 'l' in flags:
838 if 'l' in flags:
839 self.wopener.symlink(data, filename)
839 self.wopener.symlink(data, filename)
840 else:
840 else:
841 try:
841 try:
842 if os.path.islink(self.wjoin(filename)):
842 if os.path.islink(self.wjoin(filename)):
843 os.unlink(filename)
843 os.unlink(filename)
844 except OSError:
844 except OSError:
845 pass
845 pass
846 self.wopener(filename, 'w').write(data)
846 self.wopener(filename, 'w').write(data)
847
847
848 if self.is_exec:
848 if self.is_exec:
849 was_exec = self.is_exec(self.wjoin(filename))
849 was_exec = self.is_exec(self.wjoin(filename))
850 else:
850 else:
851 # On filesystems not supporting execute-bit, there is no way
851 # On filesystems not supporting execute-bit, there is no way
852 # to know if it is set but asking subversion. Setting it
852 # to know if it is set but asking subversion. Setting it
853 # systematically is just as expensive and much simpler.
853 # systematically is just as expensive and much simpler.
854 was_exec = 'x' not in flags
854 was_exec = 'x' not in flags
855
855
856 util.set_flags(self.wjoin(filename), flags)
856 util.set_flags(self.wjoin(filename), flags)
857 if was_exec:
857 if was_exec:
858 if 'x' not in flags:
858 if 'x' not in flags:
859 self.delexec.append(filename)
859 self.delexec.append(filename)
860 else:
860 else:
861 if 'x' in flags:
861 if 'x' in flags:
862 self.setexec.append(filename)
862 self.setexec.append(filename)
863
863
864 def delfile(self, name):
864 def delfile(self, name):
865 self.delete.append(name)
865 self.delete.append(name)
866
866
867 def copyfile(self, source, dest):
867 def copyfile(self, source, dest):
868 self.copies.append([source, dest])
868 self.copies.append([source, dest])
869
869
870 def _copyfile(self, source, dest):
870 def _copyfile(self, source, dest):
871 # SVN's copy command pukes if the destination file exists, but
871 # SVN's copy command pukes if the destination file exists, but
872 # our copyfile method expects to record a copy that has
872 # our copyfile method expects to record a copy that has
873 # already occurred. Cross the semantic gap.
873 # already occurred. Cross the semantic gap.
874 wdest = self.wjoin(dest)
874 wdest = self.wjoin(dest)
875 exists = os.path.exists(wdest)
875 exists = os.path.exists(wdest)
876 if exists:
876 if exists:
877 fd, tempname = tempfile.mkstemp(
877 fd, tempname = tempfile.mkstemp(
878 prefix='hg-copy-', dir=os.path.dirname(wdest))
878 prefix='hg-copy-', dir=os.path.dirname(wdest))
879 os.close(fd)
879 os.close(fd)
880 os.unlink(tempname)
880 os.unlink(tempname)
881 os.rename(wdest, tempname)
881 os.rename(wdest, tempname)
882 try:
882 try:
883 self.run0('copy', source, dest)
883 self.run0('copy', source, dest)
884 finally:
884 finally:
885 if exists:
885 if exists:
886 try:
886 try:
887 os.unlink(wdest)
887 os.unlink(wdest)
888 except OSError:
888 except OSError:
889 pass
889 pass
890 os.rename(tempname, wdest)
890 os.rename(tempname, wdest)
891
891
892 def dirs_of(self, files):
892 def dirs_of(self, files):
893 dirs = set()
893 dirs = set()
894 for f in files:
894 for f in files:
895 if os.path.isdir(self.wjoin(f)):
895 if os.path.isdir(self.wjoin(f)):
896 dirs.add(f)
896 dirs.add(f)
897 for i in strutil.rfindall(f, '/'):
897 for i in strutil.rfindall(f, '/'):
898 dirs.add(f[:i])
898 dirs.add(f[:i])
899 return dirs
899 return dirs
900
900
901 def add_dirs(self, files):
901 def add_dirs(self, files):
902 add_dirs = [d for d in self.dirs_of(files)
902 add_dirs = [d for d in self.dirs_of(files)
903 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
903 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
904 if add_dirs:
904 if add_dirs:
905 add_dirs.sort()
905 add_dirs.sort()
906 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
906 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
907 return add_dirs
907 return add_dirs
908
908
909 def add_files(self, files):
909 def add_files(self, files):
910 if files:
910 if files:
911 self.xargs(files, 'add', quiet=True)
911 self.xargs(files, 'add', quiet=True)
912 return files
912 return files
913
913
914 def tidy_dirs(self, names):
914 def tidy_dirs(self, names):
915 dirs = list(self.dirs_of(names))
915 dirs = list(self.dirs_of(names))
916 dirs.sort(reverse=True)
916 dirs.sort(reverse=True)
917 deleted = []
917 deleted = []
918 for d in dirs:
918 for d in dirs:
919 wd = self.wjoin(d)
919 wd = self.wjoin(d)
920 if os.listdir(wd) == '.svn':
920 if os.listdir(wd) == '.svn':
921 self.run0('delete', d)
921 self.run0('delete', d)
922 deleted.append(d)
922 deleted.append(d)
923 return deleted
923 return deleted
924
924
925 def addchild(self, parent, child):
925 def addchild(self, parent, child):
926 self.childmap[parent] = child
926 self.childmap[parent] = child
927
927
928 def revid(self, rev):
928 def revid(self, rev):
929 return u"svn:%s@%s" % (self.uuid, rev)
929 return u"svn:%s@%s" % (self.uuid, rev)
930
930
931 def putcommit(self, files, parents, commit):
931 def putcommit(self, files, parents, commit):
932 for parent in parents:
932 for parent in parents:
933 try:
933 try:
934 return self.revid(self.childmap[parent])
934 return self.revid(self.childmap[parent])
935 except KeyError:
935 except KeyError:
936 pass
936 pass
937 entries = set(self.delete)
937 entries = set(self.delete)
938 files = util.frozenset(files)
938 files = util.frozenset(files)
939 entries.update(self.add_dirs(files.difference(entries)))
939 entries.update(self.add_dirs(files.difference(entries)))
940 if self.copies:
940 if self.copies:
941 for s, d in self.copies:
941 for s, d in self.copies:
942 self._copyfile(s, d)
942 self._copyfile(s, d)
943 self.copies = []
943 self.copies = []
944 if self.delete:
944 if self.delete:
945 self.xargs(self.delete, 'delete')
945 self.xargs(self.delete, 'delete')
946 self.delete = []
946 self.delete = []
947 entries.update(self.add_files(files.difference(entries)))
947 entries.update(self.add_files(files.difference(entries)))
948 entries.update(self.tidy_dirs(entries))
948 entries.update(self.tidy_dirs(entries))
949 if self.delexec:
949 if self.delexec:
950 self.xargs(self.delexec, 'propdel', 'svn:executable')
950 self.xargs(self.delexec, 'propdel', 'svn:executable')
951 self.delexec = []
951 self.delexec = []
952 if self.setexec:
952 if self.setexec:
953 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
953 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
954 self.setexec = []
954 self.setexec = []
955
955
956 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
956 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
957 fp = os.fdopen(fd, 'w')
957 fp = os.fdopen(fd, 'w')
958 fp.write(commit.desc)
958 fp.write(commit.desc)
959 fp.close()
959 fp.close()
960 try:
960 try:
961 output = self.run0('commit',
961 output = self.run0('commit',
962 username=util.shortuser(commit.author),
962 username=util.shortuser(commit.author),
963 file=messagefile,
963 file=messagefile,
964 encoding='utf-8')
964 encoding='utf-8')
965 try:
965 try:
966 rev = self.commit_re.search(output).group(1)
966 rev = self.commit_re.search(output).group(1)
967 except AttributeError:
967 except AttributeError:
968 self.ui.warn(_('unexpected svn output:\n'))
968 self.ui.warn(_('unexpected svn output:\n'))
969 self.ui.warn(output)
969 self.ui.warn(output)
970 raise util.Abort(_('unable to cope with svn output'))
970 raise util.Abort(_('unable to cope with svn output'))
971 if commit.rev:
971 if commit.rev:
972 self.run('propset', 'hg:convert-rev', commit.rev,
972 self.run('propset', 'hg:convert-rev', commit.rev,
973 revprop=True, revision=rev)
973 revprop=True, revision=rev)
974 if commit.branch and commit.branch != 'default':
974 if commit.branch and commit.branch != 'default':
975 self.run('propset', 'hg:convert-branch', commit.branch,
975 self.run('propset', 'hg:convert-branch', commit.branch,
976 revprop=True, revision=rev)
976 revprop=True, revision=rev)
977 for parent in parents:
977 for parent in parents:
978 self.addchild(parent, rev)
978 self.addchild(parent, rev)
979 return self.revid(rev)
979 return self.revid(rev)
980 finally:
980 finally:
981 os.unlink(messagefile)
981 os.unlink(messagefile)
982
982
983 def puttags(self, tags):
983 def puttags(self, tags):
984 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
984 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,2059 +1,2061 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import re, lock, transaction, tempfile, stat, errno, ui
12 import re, lock, transaction, tempfile, stat, errno, ui
13 import os, revlog, time, util, extensions, hook
13 import os, revlog, time, util, extensions, hook
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = util.set(('lookup', 'changegroupsubset'))
16 capabilities = util.set(('lookup', 'changegroupsubset'))
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 repo.repository.__init__(self)
20 repo.repository.__init__(self)
21 self.root = os.path.realpath(path)
21 self.root = os.path.realpath(path)
22 self.path = os.path.join(self.root, ".hg")
22 self.path = os.path.join(self.root, ".hg")
23 self.origroot = path
23 self.origroot = path
24 self.opener = util.opener(self.path)
24 self.opener = util.opener(self.path)
25 self.wopener = util.opener(self.root)
25 self.wopener = util.opener(self.root)
26
26
27 if not os.path.isdir(self.path):
27 if not os.path.isdir(self.path):
28 if create:
28 if create:
29 if not os.path.exists(path):
29 if not os.path.exists(path):
30 os.mkdir(path)
30 os.mkdir(path)
31 os.mkdir(self.path)
31 os.mkdir(self.path)
32 requirements = ["revlogv1"]
32 requirements = ["revlogv1"]
33 if parentui.configbool('format', 'usestore', True):
33 if parentui.configbool('format', 'usestore', True):
34 os.mkdir(os.path.join(self.path, "store"))
34 os.mkdir(os.path.join(self.path, "store"))
35 requirements.append("store")
35 requirements.append("store")
36 # create an invalid changelog
36 # create an invalid changelog
37 self.opener("00changelog.i", "a").write(
37 self.opener("00changelog.i", "a").write(
38 '\0\0\0\2' # represents revlogv2
38 '\0\0\0\2' # represents revlogv2
39 ' dummy changelog to prevent using the old repo layout'
39 ' dummy changelog to prevent using the old repo layout'
40 )
40 )
41 reqfile = self.opener("requires", "w")
41 reqfile = self.opener("requires", "w")
42 for r in requirements:
42 for r in requirements:
43 reqfile.write("%s\n" % r)
43 reqfile.write("%s\n" % r)
44 reqfile.close()
44 reqfile.close()
45 else:
45 else:
46 raise repo.RepoError(_("repository %s not found") % path)
46 raise repo.RepoError(_("repository %s not found") % path)
47 elif create:
47 elif create:
48 raise repo.RepoError(_("repository %s already exists") % path)
48 raise repo.RepoError(_("repository %s already exists") % path)
49 else:
49 else:
50 # find requirements
50 # find requirements
51 try:
51 try:
52 requirements = self.opener("requires").read().splitlines()
52 requirements = self.opener("requires").read().splitlines()
53 except IOError, inst:
53 except IOError, inst:
54 if inst.errno != errno.ENOENT:
54 if inst.errno != errno.ENOENT:
55 raise
55 raise
56 requirements = []
56 requirements = []
57 # check them
57 # check them
58 for r in requirements:
58 for r in requirements:
59 if r not in self.supported:
59 if r not in self.supported:
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
61
61
62 # setup store
62 # setup store
63 if "store" in requirements:
63 if "store" in requirements:
64 self.encodefn = util.encodefilename
64 self.encodefn = util.encodefilename
65 self.decodefn = util.decodefilename
65 self.decodefn = util.decodefilename
66 self.spath = os.path.join(self.path, "store")
66 self.spath = os.path.join(self.path, "store")
67 else:
67 else:
68 self.encodefn = lambda x: x
68 self.encodefn = lambda x: x
69 self.decodefn = lambda x: x
69 self.decodefn = lambda x: x
70 self.spath = self.path
70 self.spath = self.path
71 self.sopener = util.encodedopener(util.opener(self.spath),
71 self.sopener = util.encodedopener(util.opener(self.spath),
72 self.encodefn)
72 self.encodefn)
73
73
74 self.ui = ui.ui(parentui=parentui)
74 self.ui = ui.ui(parentui=parentui)
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self._tagstypecache = None
82 self._tagstypecache = None
83 self.branchcache = None
83 self.branchcache = None
84 self.nodetagscache = None
84 self.nodetagscache = None
85 self.filterpats = {}
85 self.filterpats = {}
86 self._transref = self._lockref = self._wlockref = None
86 self._transref = self._lockref = self._wlockref = None
87
87
88 def __getattr__(self, name):
88 def __getattr__(self, name):
89 if name == 'changelog':
89 if name == 'changelog':
90 self.changelog = changelog.changelog(self.sopener)
90 self.changelog = changelog.changelog(self.sopener)
91 self.sopener.defversion = self.changelog.version
91 self.sopener.defversion = self.changelog.version
92 return self.changelog
92 return self.changelog
93 if name == 'manifest':
93 if name == 'manifest':
94 self.changelog
94 self.changelog
95 self.manifest = manifest.manifest(self.sopener)
95 self.manifest = manifest.manifest(self.sopener)
96 return self.manifest
96 return self.manifest
97 if name == 'dirstate':
97 if name == 'dirstate':
98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 return self.dirstate
99 return self.dirstate
100 else:
100 else:
101 raise AttributeError, name
101 raise AttributeError, name
102
102
103 def url(self):
103 def url(self):
104 return 'file:' + self.root
104 return 'file:' + self.root
105
105
106 def hook(self, name, throw=False, **args):
106 def hook(self, name, throw=False, **args):
107 return hook.hook(self.ui, self, name, throw, **args)
107 return hook.hook(self.ui, self, name, throw, **args)
108
108
109 tag_disallowed = ':\r\n'
109 tag_disallowed = ':\r\n'
110
110
111 def _tag(self, name, node, message, local, user, date, parent=None,
111 def _tag(self, name, node, message, local, user, date, parent=None,
112 extra={}):
112 extra={}):
113 use_dirstate = parent is None
113 use_dirstate = parent is None
114
114
115 for c in self.tag_disallowed:
115 for c in self.tag_disallowed:
116 if c in name:
116 if c in name:
117 raise util.Abort(_('%r cannot be used in a tag name') % c)
117 raise util.Abort(_('%r cannot be used in a tag name') % c)
118
118
119 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
119 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
120
120
121 def writetag(fp, name, munge, prevtags):
121 def writetag(fp, name, munge, prevtags):
122 if prevtags and prevtags[-1] != '\n':
122 if prevtags and prevtags[-1] != '\n':
123 fp.write('\n')
123 fp.write('\n')
124 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
124 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
125 fp.close()
125 fp.close()
126
126
127 prevtags = ''
127 prevtags = ''
128 if local:
128 if local:
129 try:
129 try:
130 fp = self.opener('localtags', 'r+')
130 fp = self.opener('localtags', 'r+')
131 except IOError, err:
131 except IOError, err:
132 fp = self.opener('localtags', 'a')
132 fp = self.opener('localtags', 'a')
133 else:
133 else:
134 prevtags = fp.read()
134 prevtags = fp.read()
135
135
136 # local tags are stored in the current charset
136 # local tags are stored in the current charset
137 writetag(fp, name, None, prevtags)
137 writetag(fp, name, None, prevtags)
138 self.hook('tag', node=hex(node), tag=name, local=local)
138 self.hook('tag', node=hex(node), tag=name, local=local)
139 return
139 return
140
140
141 if use_dirstate:
141 if use_dirstate:
142 try:
142 try:
143 fp = self.wfile('.hgtags', 'rb+')
143 fp = self.wfile('.hgtags', 'rb+')
144 except IOError, err:
144 except IOError, err:
145 fp = self.wfile('.hgtags', 'ab')
145 fp = self.wfile('.hgtags', 'ab')
146 else:
146 else:
147 prevtags = fp.read()
147 prevtags = fp.read()
148 else:
148 else:
149 try:
149 try:
150 prevtags = self.filectx('.hgtags', parent).data()
150 prevtags = self.filectx('.hgtags', parent).data()
151 except revlog.LookupError:
151 except revlog.LookupError:
152 pass
152 pass
153 fp = self.wfile('.hgtags', 'wb')
153 fp = self.wfile('.hgtags', 'wb')
154 if prevtags:
154 if prevtags:
155 fp.write(prevtags)
155 fp.write(prevtags)
156
156
157 # committed tags are stored in UTF-8
157 # committed tags are stored in UTF-8
158 writetag(fp, name, util.fromlocal, prevtags)
158 writetag(fp, name, util.fromlocal, prevtags)
159
159
160 if use_dirstate and '.hgtags' not in self.dirstate:
160 if use_dirstate and '.hgtags' not in self.dirstate:
161 self.add(['.hgtags'])
161 self.add(['.hgtags'])
162
162
163 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
163 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
164 extra=extra)
164 extra=extra)
165
165
166 self.hook('tag', node=hex(node), tag=name, local=local)
166 self.hook('tag', node=hex(node), tag=name, local=local)
167
167
168 return tagnode
168 return tagnode
169
169
170 def tag(self, name, node, message, local, user, date):
170 def tag(self, name, node, message, local, user, date):
171 '''tag a revision with a symbolic name.
171 '''tag a revision with a symbolic name.
172
172
173 if local is True, the tag is stored in a per-repository file.
173 if local is True, the tag is stored in a per-repository file.
174 otherwise, it is stored in the .hgtags file, and a new
174 otherwise, it is stored in the .hgtags file, and a new
175 changeset is committed with the change.
175 changeset is committed with the change.
176
176
177 keyword arguments:
177 keyword arguments:
178
178
179 local: whether to store tag in non-version-controlled file
179 local: whether to store tag in non-version-controlled file
180 (default False)
180 (default False)
181
181
182 message: commit message to use if committing
182 message: commit message to use if committing
183
183
184 user: name of user to use if committing
184 user: name of user to use if committing
185
185
186 date: date tuple to use if committing'''
186 date: date tuple to use if committing'''
187
187
188 for x in self.status()[:5]:
188 for x in self.status()[:5]:
189 if '.hgtags' in x:
189 if '.hgtags' in x:
190 raise util.Abort(_('working copy of .hgtags is changed '
190 raise util.Abort(_('working copy of .hgtags is changed '
191 '(please commit .hgtags manually)'))
191 '(please commit .hgtags manually)'))
192
192
193
193
194 self._tag(name, node, message, local, user, date)
194 self._tag(name, node, message, local, user, date)
195
195
196 def tags(self):
196 def tags(self):
197 '''return a mapping of tag to node'''
197 '''return a mapping of tag to node'''
198 if self.tagscache:
198 if self.tagscache:
199 return self.tagscache
199 return self.tagscache
200
200
201 globaltags = {}
201 globaltags = {}
202 tagtypes = {}
202 tagtypes = {}
203
203
204 def readtags(lines, fn, tagtype):
204 def readtags(lines, fn, tagtype):
205 filetags = {}
205 filetags = {}
206 count = 0
206 count = 0
207
207
208 def warn(msg):
208 def warn(msg):
209 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
209 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
210
210
211 for l in lines:
211 for l in lines:
212 count += 1
212 count += 1
213 if not l:
213 if not l:
214 continue
214 continue
215 s = l.split(" ", 1)
215 s = l.split(" ", 1)
216 if len(s) != 2:
216 if len(s) != 2:
217 warn(_("cannot parse entry"))
217 warn(_("cannot parse entry"))
218 continue
218 continue
219 node, key = s
219 node, key = s
220 key = util.tolocal(key.strip()) # stored in UTF-8
220 key = util.tolocal(key.strip()) # stored in UTF-8
221 try:
221 try:
222 bin_n = bin(node)
222 bin_n = bin(node)
223 except TypeError:
223 except TypeError:
224 warn(_("node '%s' is not well formed") % node)
224 warn(_("node '%s' is not well formed") % node)
225 continue
225 continue
226 if bin_n not in self.changelog.nodemap:
226 if bin_n not in self.changelog.nodemap:
227 warn(_("tag '%s' refers to unknown node") % key)
227 warn(_("tag '%s' refers to unknown node") % key)
228 continue
228 continue
229
229
230 h = []
230 h = []
231 if key in filetags:
231 if key in filetags:
232 n, h = filetags[key]
232 n, h = filetags[key]
233 h.append(n)
233 h.append(n)
234 filetags[key] = (bin_n, h)
234 filetags[key] = (bin_n, h)
235
235
236 for k, nh in filetags.items():
236 for k, nh in filetags.items():
237 if k not in globaltags:
237 if k not in globaltags:
238 globaltags[k] = nh
238 globaltags[k] = nh
239 tagtypes[k] = tagtype
239 tagtypes[k] = tagtype
240 continue
240 continue
241
241
242 # we prefer the global tag if:
242 # we prefer the global tag if:
243 # it supercedes us OR
243 # it supercedes us OR
244 # mutual supercedes and it has a higher rank
244 # mutual supercedes and it has a higher rank
245 # otherwise we win because we're tip-most
245 # otherwise we win because we're tip-most
246 an, ah = nh
246 an, ah = nh
247 bn, bh = globaltags[k]
247 bn, bh = globaltags[k]
248 if (bn != an and an in bh and
248 if (bn != an and an in bh and
249 (bn not in ah or len(bh) > len(ah))):
249 (bn not in ah or len(bh) > len(ah))):
250 an = bn
250 an = bn
251 ah.extend([n for n in bh if n not in ah])
251 ah.extend([n for n in bh if n not in ah])
252 globaltags[k] = an, ah
252 globaltags[k] = an, ah
253 tagtypes[k] = tagtype
253 tagtypes[k] = tagtype
254
254
255 # read the tags file from each head, ending with the tip
255 # read the tags file from each head, ending with the tip
256 f = None
256 f = None
257 for rev, node, fnode in self._hgtagsnodes():
257 for rev, node, fnode in self._hgtagsnodes():
258 f = (f and f.filectx(fnode) or
258 f = (f and f.filectx(fnode) or
259 self.filectx('.hgtags', fileid=fnode))
259 self.filectx('.hgtags', fileid=fnode))
260 readtags(f.data().splitlines(), f, "global")
260 readtags(f.data().splitlines(), f, "global")
261
261
262 try:
262 try:
263 data = util.fromlocal(self.opener("localtags").read())
263 data = util.fromlocal(self.opener("localtags").read())
264 # localtags are stored in the local character set
264 # localtags are stored in the local character set
265 # while the internal tag table is stored in UTF-8
265 # while the internal tag table is stored in UTF-8
266 readtags(data.splitlines(), "localtags", "local")
266 readtags(data.splitlines(), "localtags", "local")
267 except IOError:
267 except IOError:
268 pass
268 pass
269
269
270 self.tagscache = {}
270 self.tagscache = {}
271 self._tagstypecache = {}
271 self._tagstypecache = {}
272 for k,nh in globaltags.items():
272 for k,nh in globaltags.items():
273 n = nh[0]
273 n = nh[0]
274 if n != nullid:
274 if n != nullid:
275 self.tagscache[k] = n
275 self.tagscache[k] = n
276 self._tagstypecache[k] = tagtypes[k]
276 self._tagstypecache[k] = tagtypes[k]
277 self.tagscache['tip'] = self.changelog.tip()
277 self.tagscache['tip'] = self.changelog.tip()
278
278
279 return self.tagscache
279 return self.tagscache
280
280
281 def tagtype(self, tagname):
281 def tagtype(self, tagname):
282 '''
282 '''
283 return the type of the given tag. result can be:
283 return the type of the given tag. result can be:
284
284
285 'local' : a local tag
285 'local' : a local tag
286 'global' : a global tag
286 'global' : a global tag
287 None : tag does not exist
287 None : tag does not exist
288 '''
288 '''
289
289
290 self.tags()
290 self.tags()
291
291
292 return self._tagstypecache.get(tagname)
292 return self._tagstypecache.get(tagname)
293
293
294 def _hgtagsnodes(self):
294 def _hgtagsnodes(self):
295 heads = self.heads()
295 heads = self.heads()
296 heads.reverse()
296 heads.reverse()
297 last = {}
297 last = {}
298 ret = []
298 ret = []
299 for node in heads:
299 for node in heads:
300 c = self.changectx(node)
300 c = self.changectx(node)
301 rev = c.rev()
301 rev = c.rev()
302 try:
302 try:
303 fnode = c.filenode('.hgtags')
303 fnode = c.filenode('.hgtags')
304 except revlog.LookupError:
304 except revlog.LookupError:
305 continue
305 continue
306 ret.append((rev, node, fnode))
306 ret.append((rev, node, fnode))
307 if fnode in last:
307 if fnode in last:
308 ret[last[fnode]] = None
308 ret[last[fnode]] = None
309 last[fnode] = len(ret) - 1
309 last[fnode] = len(ret) - 1
310 return [item for item in ret if item]
310 return [item for item in ret if item]
311
311
312 def tagslist(self):
312 def tagslist(self):
313 '''return a list of tags ordered by revision'''
313 '''return a list of tags ordered by revision'''
314 l = []
314 l = []
315 for t, n in self.tags().items():
315 for t, n in self.tags().items():
316 try:
316 try:
317 r = self.changelog.rev(n)
317 r = self.changelog.rev(n)
318 except:
318 except:
319 r = -2 # sort to the beginning of the list if unknown
319 r = -2 # sort to the beginning of the list if unknown
320 l.append((r, t, n))
320 l.append((r, t, n))
321 l.sort()
321 l.sort()
322 return [(t, n) for r, t, n in l]
322 return [(t, n) for r, t, n in l]
323
323
324 def nodetags(self, node):
324 def nodetags(self, node):
325 '''return the tags associated with a node'''
325 '''return the tags associated with a node'''
326 if not self.nodetagscache:
326 if not self.nodetagscache:
327 self.nodetagscache = {}
327 self.nodetagscache = {}
328 for t, n in self.tags().items():
328 for t, n in self.tags().items():
329 self.nodetagscache.setdefault(n, []).append(t)
329 self.nodetagscache.setdefault(n, []).append(t)
330 return self.nodetagscache.get(node, [])
330 return self.nodetagscache.get(node, [])
331
331
332 def _branchtags(self):
332 def _branchtags(self):
333 partial, last, lrev = self._readbranchcache()
333 partial, last, lrev = self._readbranchcache()
334
334
335 tiprev = self.changelog.count() - 1
335 tiprev = self.changelog.count() - 1
336 if lrev != tiprev:
336 if lrev != tiprev:
337 self._updatebranchcache(partial, lrev+1, tiprev+1)
337 self._updatebranchcache(partial, lrev+1, tiprev+1)
338 self._writebranchcache(partial, self.changelog.tip(), tiprev)
338 self._writebranchcache(partial, self.changelog.tip(), tiprev)
339
339
340 return partial
340 return partial
341
341
342 def branchtags(self):
342 def branchtags(self):
343 if self.branchcache is not None:
343 if self.branchcache is not None:
344 return self.branchcache
344 return self.branchcache
345
345
346 self.branchcache = {} # avoid recursion in changectx
346 self.branchcache = {} # avoid recursion in changectx
347 partial = self._branchtags()
347 partial = self._branchtags()
348
348
349 # the branch cache is stored on disk as UTF-8, but in the local
349 # the branch cache is stored on disk as UTF-8, but in the local
350 # charset internally
350 # charset internally
351 for k, v in partial.items():
351 for k, v in partial.items():
352 self.branchcache[util.tolocal(k)] = v
352 self.branchcache[util.tolocal(k)] = v
353 return self.branchcache
353 return self.branchcache
354
354
355 def _readbranchcache(self):
355 def _readbranchcache(self):
356 partial = {}
356 partial = {}
357 try:
357 try:
358 f = self.opener("branch.cache")
358 f = self.opener("branch.cache")
359 lines = f.read().split('\n')
359 lines = f.read().split('\n')
360 f.close()
360 f.close()
361 except (IOError, OSError):
361 except (IOError, OSError):
362 return {}, nullid, nullrev
362 return {}, nullid, nullrev
363
363
364 try:
364 try:
365 last, lrev = lines.pop(0).split(" ", 1)
365 last, lrev = lines.pop(0).split(" ", 1)
366 last, lrev = bin(last), int(lrev)
366 last, lrev = bin(last), int(lrev)
367 if not (lrev < self.changelog.count() and
367 if not (lrev < self.changelog.count() and
368 self.changelog.node(lrev) == last): # sanity check
368 self.changelog.node(lrev) == last): # sanity check
369 # invalidate the cache
369 # invalidate the cache
370 raise ValueError('Invalid branch cache: unknown tip')
370 raise ValueError('Invalid branch cache: unknown tip')
371 for l in lines:
371 for l in lines:
372 if not l: continue
372 if not l: continue
373 node, label = l.split(" ", 1)
373 node, label = l.split(" ", 1)
374 partial[label.strip()] = bin(node)
374 partial[label.strip()] = bin(node)
375 except (KeyboardInterrupt, util.SignalInterrupt):
375 except (KeyboardInterrupt, util.SignalInterrupt):
376 raise
376 raise
377 except Exception, inst:
377 except Exception, inst:
378 if self.ui.debugflag:
378 if self.ui.debugflag:
379 self.ui.warn(str(inst), '\n')
379 self.ui.warn(str(inst), '\n')
380 partial, last, lrev = {}, nullid, nullrev
380 partial, last, lrev = {}, nullid, nullrev
381 return partial, last, lrev
381 return partial, last, lrev
382
382
383 def _writebranchcache(self, branches, tip, tiprev):
383 def _writebranchcache(self, branches, tip, tiprev):
384 try:
384 try:
385 f = self.opener("branch.cache", "w", atomictemp=True)
385 f = self.opener("branch.cache", "w", atomictemp=True)
386 f.write("%s %s\n" % (hex(tip), tiprev))
386 f.write("%s %s\n" % (hex(tip), tiprev))
387 for label, node in branches.iteritems():
387 for label, node in branches.iteritems():
388 f.write("%s %s\n" % (hex(node), label))
388 f.write("%s %s\n" % (hex(node), label))
389 f.rename()
389 f.rename()
390 except (IOError, OSError):
390 except (IOError, OSError):
391 pass
391 pass
392
392
393 def _updatebranchcache(self, partial, start, end):
393 def _updatebranchcache(self, partial, start, end):
394 for r in xrange(start, end):
394 for r in xrange(start, end):
395 c = self.changectx(r)
395 c = self.changectx(r)
396 b = c.branch()
396 b = c.branch()
397 partial[b] = c.node()
397 partial[b] = c.node()
398
398
399 def lookup(self, key):
399 def lookup(self, key):
400 if key == '.':
400 if key == '.':
401 key, second = self.dirstate.parents()
401 key, second = self.dirstate.parents()
402 if key == nullid:
402 if key == nullid:
403 raise repo.RepoError(_("no revision checked out"))
403 raise repo.RepoError(_("no revision checked out"))
404 if second != nullid:
404 if second != nullid:
405 self.ui.warn(_("warning: working directory has two parents, "
405 self.ui.warn(_("warning: working directory has two parents, "
406 "tag '.' uses the first\n"))
406 "tag '.' uses the first\n"))
407 elif key == 'null':
407 elif key == 'null':
408 return nullid
408 return nullid
409 n = self.changelog._match(key)
409 n = self.changelog._match(key)
410 if n:
410 if n:
411 return n
411 return n
412 if key in self.tags():
412 if key in self.tags():
413 return self.tags()[key]
413 return self.tags()[key]
414 if key in self.branchtags():
414 if key in self.branchtags():
415 return self.branchtags()[key]
415 return self.branchtags()[key]
416 n = self.changelog._partialmatch(key)
416 n = self.changelog._partialmatch(key)
417 if n:
417 if n:
418 return n
418 return n
419 try:
419 try:
420 if len(key) == 20:
420 if len(key) == 20:
421 key = hex(key)
421 key = hex(key)
422 except:
422 except:
423 pass
423 pass
424 raise repo.RepoError(_("unknown revision '%s'") % key)
424 raise repo.RepoError(_("unknown revision '%s'") % key)
425
425
426 def dev(self):
426 def dev(self):
427 return os.lstat(self.path).st_dev
427 return os.lstat(self.path).st_dev
428
428
429 def local(self):
429 def local(self):
430 return True
430 return True
431
431
432 def join(self, f):
432 def join(self, f):
433 return os.path.join(self.path, f)
433 return os.path.join(self.path, f)
434
434
435 def sjoin(self, f):
435 def sjoin(self, f):
436 f = self.encodefn(f)
436 f = self.encodefn(f)
437 return os.path.join(self.spath, f)
437 return os.path.join(self.spath, f)
438
438
439 def wjoin(self, f):
439 def wjoin(self, f):
440 return os.path.join(self.root, f)
440 return os.path.join(self.root, f)
441
441
442 def file(self, f):
442 def file(self, f):
443 if f[0] == '/':
443 if f[0] == '/':
444 f = f[1:]
444 f = f[1:]
445 return filelog.filelog(self.sopener, f)
445 return filelog.filelog(self.sopener, f)
446
446
447 def changectx(self, changeid=None):
447 def changectx(self, changeid=None):
448 return context.changectx(self, changeid)
448 return context.changectx(self, changeid)
449
449
450 def workingctx(self):
450 def workingctx(self):
451 return context.workingctx(self)
451 return context.workingctx(self)
452
452
453 def parents(self, changeid=None):
453 def parents(self, changeid=None):
454 '''
454 '''
455 get list of changectxs for parents of changeid or working directory
455 get list of changectxs for parents of changeid or working directory
456 '''
456 '''
457 if changeid is None:
457 if changeid is None:
458 pl = self.dirstate.parents()
458 pl = self.dirstate.parents()
459 else:
459 else:
460 n = self.changelog.lookup(changeid)
460 n = self.changelog.lookup(changeid)
461 pl = self.changelog.parents(n)
461 pl = self.changelog.parents(n)
462 if pl[1] == nullid:
462 if pl[1] == nullid:
463 return [self.changectx(pl[0])]
463 return [self.changectx(pl[0])]
464 return [self.changectx(pl[0]), self.changectx(pl[1])]
464 return [self.changectx(pl[0]), self.changectx(pl[1])]
465
465
466 def filectx(self, path, changeid=None, fileid=None):
466 def filectx(self, path, changeid=None, fileid=None):
467 """changeid can be a changeset revision, node, or tag.
467 """changeid can be a changeset revision, node, or tag.
468 fileid can be a file revision or node."""
468 fileid can be a file revision or node."""
469 return context.filectx(self, path, changeid, fileid)
469 return context.filectx(self, path, changeid, fileid)
470
470
471 def getcwd(self):
471 def getcwd(self):
472 return self.dirstate.getcwd()
472 return self.dirstate.getcwd()
473
473
474 def pathto(self, f, cwd=None):
474 def pathto(self, f, cwd=None):
475 return self.dirstate.pathto(f, cwd)
475 return self.dirstate.pathto(f, cwd)
476
476
477 def wfile(self, f, mode='r'):
477 def wfile(self, f, mode='r'):
478 return self.wopener(f, mode)
478 return self.wopener(f, mode)
479
479
480 def _link(self, f):
480 def _link(self, f):
481 return os.path.islink(self.wjoin(f))
481 return os.path.islink(self.wjoin(f))
482
482
483 def _filter(self, filter, filename, data):
483 def _filter(self, filter, filename, data):
484 if filter not in self.filterpats:
484 if filter not in self.filterpats:
485 l = []
485 l = []
486 for pat, cmd in self.ui.configitems(filter):
486 for pat, cmd in self.ui.configitems(filter):
487 mf = util.matcher(self.root, "", [pat], [], [])[1]
487 mf = util.matcher(self.root, "", [pat], [], [])[1]
488 l.append((mf, cmd))
488 l.append((mf, cmd))
489 self.filterpats[filter] = l
489 self.filterpats[filter] = l
490
490
491 for mf, cmd in self.filterpats[filter]:
491 for mf, cmd in self.filterpats[filter]:
492 if mf(filename):
492 if mf(filename):
493 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
493 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
494 data = util.filter(data, cmd)
494 data = util.filter(data, cmd)
495 break
495 break
496
496
497 return data
497 return data
498
498
499 def wread(self, filename):
499 def wread(self, filename):
500 if self._link(filename):
500 if self._link(filename):
501 data = os.readlink(self.wjoin(filename))
501 data = os.readlink(self.wjoin(filename))
502 else:
502 else:
503 data = self.wopener(filename, 'r').read()
503 data = self.wopener(filename, 'r').read()
504 return self._filter("encode", filename, data)
504 return self._filter("encode", filename, data)
505
505
506 def wwrite(self, filename, data, flags):
506 def wwrite(self, filename, data, flags):
507 data = self._filter("decode", filename, data)
507 data = self._filter("decode", filename, data)
508 try:
508 try:
509 os.unlink(self.wjoin(filename))
509 os.unlink(self.wjoin(filename))
510 except OSError:
510 except OSError:
511 pass
511 pass
512 self.wopener(filename, 'w').write(data)
512 self.wopener(filename, 'w').write(data)
513 util.set_flags(self.wjoin(filename), flags)
513 util.set_flags(self.wjoin(filename), flags)
514
514
515 def wwritedata(self, filename, data):
515 def wwritedata(self, filename, data):
516 return self._filter("decode", filename, data)
516 return self._filter("decode", filename, data)
517
517
518 def transaction(self):
518 def transaction(self):
519 if self._transref and self._transref():
519 if self._transref and self._transref():
520 return self._transref().nest()
520 return self._transref().nest()
521
521
522 # abort here if the journal already exists
522 # abort here if the journal already exists
523 if os.path.exists(self.sjoin("journal")):
523 if os.path.exists(self.sjoin("journal")):
524 raise repo.RepoError(_("journal already exists - run hg recover"))
524 raise repo.RepoError(_("journal already exists - run hg recover"))
525
525
526 # save dirstate for rollback
526 # save dirstate for rollback
527 try:
527 try:
528 ds = self.opener("dirstate").read()
528 ds = self.opener("dirstate").read()
529 except IOError:
529 except IOError:
530 ds = ""
530 ds = ""
531 self.opener("journal.dirstate", "w").write(ds)
531 self.opener("journal.dirstate", "w").write(ds)
532 self.opener("journal.branch", "w").write(self.dirstate.branch())
532 self.opener("journal.branch", "w").write(self.dirstate.branch())
533
533
534 renames = [(self.sjoin("journal"), self.sjoin("undo")),
534 renames = [(self.sjoin("journal"), self.sjoin("undo")),
535 (self.join("journal.dirstate"), self.join("undo.dirstate")),
535 (self.join("journal.dirstate"), self.join("undo.dirstate")),
536 (self.join("journal.branch"), self.join("undo.branch"))]
536 (self.join("journal.branch"), self.join("undo.branch"))]
537 tr = transaction.transaction(self.ui.warn, self.sopener,
537 tr = transaction.transaction(self.ui.warn, self.sopener,
538 self.sjoin("journal"),
538 self.sjoin("journal"),
539 aftertrans(renames))
539 aftertrans(renames))
540 self._transref = weakref.ref(tr)
540 self._transref = weakref.ref(tr)
541 return tr
541 return tr
542
542
543 def recover(self):
543 def recover(self):
544 l = self.lock()
544 l = self.lock()
545 try:
545 try:
546 if os.path.exists(self.sjoin("journal")):
546 if os.path.exists(self.sjoin("journal")):
547 self.ui.status(_("rolling back interrupted transaction\n"))
547 self.ui.status(_("rolling back interrupted transaction\n"))
548 transaction.rollback(self.sopener, self.sjoin("journal"))
548 transaction.rollback(self.sopener, self.sjoin("journal"))
549 self.invalidate()
549 self.invalidate()
550 return True
550 return True
551 else:
551 else:
552 self.ui.warn(_("no interrupted transaction available\n"))
552 self.ui.warn(_("no interrupted transaction available\n"))
553 return False
553 return False
554 finally:
554 finally:
555 del l
555 del l
556
556
557 def rollback(self):
557 def rollback(self):
558 wlock = lock = None
558 wlock = lock = None
559 try:
559 try:
560 wlock = self.wlock()
560 wlock = self.wlock()
561 lock = self.lock()
561 lock = self.lock()
562 if os.path.exists(self.sjoin("undo")):
562 if os.path.exists(self.sjoin("undo")):
563 self.ui.status(_("rolling back last transaction\n"))
563 self.ui.status(_("rolling back last transaction\n"))
564 transaction.rollback(self.sopener, self.sjoin("undo"))
564 transaction.rollback(self.sopener, self.sjoin("undo"))
565 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
565 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
566 branch = self.opener("undo.branch").read()
566 branch = self.opener("undo.branch").read()
567 self.dirstate.setbranch(branch)
567 self.dirstate.setbranch(branch)
568 self.invalidate()
568 self.invalidate()
569 self.dirstate.invalidate()
569 self.dirstate.invalidate()
570 else:
570 else:
571 self.ui.warn(_("no rollback information available\n"))
571 self.ui.warn(_("no rollback information available\n"))
572 finally:
572 finally:
573 del lock, wlock
573 del lock, wlock
574
574
575 def invalidate(self):
575 def invalidate(self):
576 for a in "changelog manifest".split():
576 for a in "changelog manifest".split():
577 if hasattr(self, a):
577 if hasattr(self, a):
578 self.__delattr__(a)
578 self.__delattr__(a)
579 self.tagscache = None
579 self.tagscache = None
580 self._tagstypecache = None
580 self._tagstypecache = None
581 self.nodetagscache = None
581 self.nodetagscache = None
582
582
583 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
583 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
584 try:
584 try:
585 l = lock.lock(lockname, 0, releasefn, desc=desc)
585 l = lock.lock(lockname, 0, releasefn, desc=desc)
586 except lock.LockHeld, inst:
586 except lock.LockHeld, inst:
587 if not wait:
587 if not wait:
588 raise
588 raise
589 self.ui.warn(_("waiting for lock on %s held by %r\n") %
589 self.ui.warn(_("waiting for lock on %s held by %r\n") %
590 (desc, inst.locker))
590 (desc, inst.locker))
591 # default to 600 seconds timeout
591 # default to 600 seconds timeout
592 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
592 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
593 releasefn, desc=desc)
593 releasefn, desc=desc)
594 if acquirefn:
594 if acquirefn:
595 acquirefn()
595 acquirefn()
596 return l
596 return l
597
597
598 def lock(self, wait=True):
598 def lock(self, wait=True):
599 if self._lockref and self._lockref():
599 if self._lockref and self._lockref():
600 return self._lockref()
600 return self._lockref()
601
601
602 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
602 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
603 _('repository %s') % self.origroot)
603 _('repository %s') % self.origroot)
604 self._lockref = weakref.ref(l)
604 self._lockref = weakref.ref(l)
605 return l
605 return l
606
606
607 def wlock(self, wait=True):
607 def wlock(self, wait=True):
608 if self._wlockref and self._wlockref():
608 if self._wlockref and self._wlockref():
609 return self._wlockref()
609 return self._wlockref()
610
610
611 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
611 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
612 self.dirstate.invalidate, _('working directory of %s') %
612 self.dirstate.invalidate, _('working directory of %s') %
613 self.origroot)
613 self.origroot)
614 self._wlockref = weakref.ref(l)
614 self._wlockref = weakref.ref(l)
615 return l
615 return l
616
616
617 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
617 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
618 """
618 """
619 commit an individual file as part of a larger transaction
619 commit an individual file as part of a larger transaction
620 """
620 """
621
621
622 t = self.wread(fn)
622 t = self.wread(fn)
623 fl = self.file(fn)
623 fl = self.file(fn)
624 fp1 = manifest1.get(fn, nullid)
624 fp1 = manifest1.get(fn, nullid)
625 fp2 = manifest2.get(fn, nullid)
625 fp2 = manifest2.get(fn, nullid)
626
626
627 meta = {}
627 meta = {}
628 cp = self.dirstate.copied(fn)
628 cp = self.dirstate.copied(fn)
629 if cp:
629 if cp:
630 # Mark the new revision of this file as a copy of another
630 # Mark the new revision of this file as a copy of another
631 # file. This copy data will effectively act as a parent
631 # file. This copy data will effectively act as a parent
632 # of this new revision. If this is a merge, the first
632 # of this new revision. If this is a merge, the first
633 # parent will be the nullid (meaning "look up the copy data")
633 # parent will be the nullid (meaning "look up the copy data")
634 # and the second one will be the other parent. For example:
634 # and the second one will be the other parent. For example:
635 #
635 #
636 # 0 --- 1 --- 3 rev1 changes file foo
636 # 0 --- 1 --- 3 rev1 changes file foo
637 # \ / rev2 renames foo to bar and changes it
637 # \ / rev2 renames foo to bar and changes it
638 # \- 2 -/ rev3 should have bar with all changes and
638 # \- 2 -/ rev3 should have bar with all changes and
639 # should record that bar descends from
639 # should record that bar descends from
640 # bar in rev2 and foo in rev1
640 # bar in rev2 and foo in rev1
641 #
641 #
642 # this allows this merge to succeed:
642 # this allows this merge to succeed:
643 #
643 #
644 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
644 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
645 # \ / merging rev3 and rev4 should use bar@rev2
645 # \ / merging rev3 and rev4 should use bar@rev2
646 # \- 2 --- 4 as the merge base
646 # \- 2 --- 4 as the merge base
647 #
647 #
648 meta["copy"] = cp
648 meta["copy"] = cp
649 if not manifest2: # not a branch merge
649 if not manifest2: # not a branch merge
650 meta["copyrev"] = hex(manifest1.get(cp, nullid))
650 meta["copyrev"] = hex(manifest1.get(cp, nullid))
651 fp2 = nullid
651 fp2 = nullid
652 elif fp2 != nullid: # copied on remote side
652 elif fp2 != nullid: # copied on remote side
653 meta["copyrev"] = hex(manifest1.get(cp, nullid))
653 meta["copyrev"] = hex(manifest1.get(cp, nullid))
654 elif fp1 != nullid: # copied on local side, reversed
654 elif fp1 != nullid: # copied on local side, reversed
655 meta["copyrev"] = hex(manifest2.get(cp))
655 meta["copyrev"] = hex(manifest2.get(cp))
656 fp2 = fp1
656 fp2 = fp1
657 elif cp in manifest2: # directory rename on local side
657 elif cp in manifest2: # directory rename on local side
658 meta["copyrev"] = hex(manifest2[cp])
658 meta["copyrev"] = hex(manifest2[cp])
659 else: # directory rename on remote side
659 else: # directory rename on remote side
660 meta["copyrev"] = hex(manifest1.get(cp, nullid))
660 meta["copyrev"] = hex(manifest1.get(cp, nullid))
661 self.ui.debug(_(" %s: copy %s:%s\n") %
661 self.ui.debug(_(" %s: copy %s:%s\n") %
662 (fn, cp, meta["copyrev"]))
662 (fn, cp, meta["copyrev"]))
663 fp1 = nullid
663 fp1 = nullid
664 elif fp2 != nullid:
664 elif fp2 != nullid:
665 # is one parent an ancestor of the other?
665 # is one parent an ancestor of the other?
666 fpa = fl.ancestor(fp1, fp2)
666 fpa = fl.ancestor(fp1, fp2)
667 if fpa == fp1:
667 if fpa == fp1:
668 fp1, fp2 = fp2, nullid
668 fp1, fp2 = fp2, nullid
669 elif fpa == fp2:
669 elif fpa == fp2:
670 fp2 = nullid
670 fp2 = nullid
671
671
672 # is the file unmodified from the parent? report existing entry
672 # is the file unmodified from the parent? report existing entry
673 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
673 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
674 return fp1
674 return fp1
675
675
676 changelist.append(fn)
676 changelist.append(fn)
677 return fl.add(t, meta, tr, linkrev, fp1, fp2)
677 return fl.add(t, meta, tr, linkrev, fp1, fp2)
678
678
679 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
679 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
680 if p1 is None:
680 if p1 is None:
681 p1, p2 = self.dirstate.parents()
681 p1, p2 = self.dirstate.parents()
682 return self.commit(files=files, text=text, user=user, date=date,
682 return self.commit(files=files, text=text, user=user, date=date,
683 p1=p1, p2=p2, extra=extra, empty_ok=True)
683 p1=p1, p2=p2, extra=extra, empty_ok=True)
684
684
685 def commit(self, files=None, text="", user=None, date=None,
685 def commit(self, files=None, text="", user=None, date=None,
686 match=util.always, force=False, force_editor=False,
686 match=util.always, force=False, force_editor=False,
687 p1=None, p2=None, extra={}, empty_ok=False):
687 p1=None, p2=None, extra={}, empty_ok=False):
688 wlock = lock = tr = None
688 wlock = lock = tr = None
689 valid = 0 # don't save the dirstate if this isn't set
689 valid = 0 # don't save the dirstate if this isn't set
690 if files:
691 files = util.unique(files)
690 try:
692 try:
691 commit = []
693 commit = []
692 remove = []
694 remove = []
693 changed = []
695 changed = []
694 use_dirstate = (p1 is None) # not rawcommit
696 use_dirstate = (p1 is None) # not rawcommit
695 extra = extra.copy()
697 extra = extra.copy()
696
698
697 if use_dirstate:
699 if use_dirstate:
698 if files:
700 if files:
699 for f in files:
701 for f in files:
700 s = self.dirstate[f]
702 s = self.dirstate[f]
701 if s in 'nma':
703 if s in 'nma':
702 commit.append(f)
704 commit.append(f)
703 elif s == 'r':
705 elif s == 'r':
704 remove.append(f)
706 remove.append(f)
705 else:
707 else:
706 self.ui.warn(_("%s not tracked!\n") % f)
708 self.ui.warn(_("%s not tracked!\n") % f)
707 else:
709 else:
708 changes = self.status(match=match)[:5]
710 changes = self.status(match=match)[:5]
709 modified, added, removed, deleted, unknown = changes
711 modified, added, removed, deleted, unknown = changes
710 commit = modified + added
712 commit = modified + added
711 remove = removed
713 remove = removed
712 else:
714 else:
713 commit = files
715 commit = files
714
716
715 if use_dirstate:
717 if use_dirstate:
716 p1, p2 = self.dirstate.parents()
718 p1, p2 = self.dirstate.parents()
717 update_dirstate = True
719 update_dirstate = True
718 else:
720 else:
719 p1, p2 = p1, p2 or nullid
721 p1, p2 = p1, p2 or nullid
720 update_dirstate = (self.dirstate.parents()[0] == p1)
722 update_dirstate = (self.dirstate.parents()[0] == p1)
721
723
722 c1 = self.changelog.read(p1)
724 c1 = self.changelog.read(p1)
723 c2 = self.changelog.read(p2)
725 c2 = self.changelog.read(p2)
724 m1 = self.manifest.read(c1[0]).copy()
726 m1 = self.manifest.read(c1[0]).copy()
725 m2 = self.manifest.read(c2[0])
727 m2 = self.manifest.read(c2[0])
726
728
727 if use_dirstate:
729 if use_dirstate:
728 branchname = self.workingctx().branch()
730 branchname = self.workingctx().branch()
729 try:
731 try:
730 branchname = branchname.decode('UTF-8').encode('UTF-8')
732 branchname = branchname.decode('UTF-8').encode('UTF-8')
731 except UnicodeDecodeError:
733 except UnicodeDecodeError:
732 raise util.Abort(_('branch name not in UTF-8!'))
734 raise util.Abort(_('branch name not in UTF-8!'))
733 else:
735 else:
734 branchname = ""
736 branchname = ""
735
737
736 if use_dirstate:
738 if use_dirstate:
737 oldname = c1[5].get("branch") # stored in UTF-8
739 oldname = c1[5].get("branch") # stored in UTF-8
738 if (not commit and not remove and not force and p2 == nullid
740 if (not commit and not remove and not force and p2 == nullid
739 and branchname == oldname):
741 and branchname == oldname):
740 self.ui.status(_("nothing changed\n"))
742 self.ui.status(_("nothing changed\n"))
741 return None
743 return None
742
744
743 xp1 = hex(p1)
745 xp1 = hex(p1)
744 if p2 == nullid: xp2 = ''
746 if p2 == nullid: xp2 = ''
745 else: xp2 = hex(p2)
747 else: xp2 = hex(p2)
746
748
747 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
749 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
748
750
749 wlock = self.wlock()
751 wlock = self.wlock()
750 lock = self.lock()
752 lock = self.lock()
751 tr = self.transaction()
753 tr = self.transaction()
752 trp = weakref.proxy(tr)
754 trp = weakref.proxy(tr)
753
755
754 # check in files
756 # check in files
755 new = {}
757 new = {}
756 linkrev = self.changelog.count()
758 linkrev = self.changelog.count()
757 commit.sort()
759 commit.sort()
758 is_exec = util.execfunc(self.root, m1.execf)
760 is_exec = util.execfunc(self.root, m1.execf)
759 is_link = util.linkfunc(self.root, m1.linkf)
761 is_link = util.linkfunc(self.root, m1.linkf)
760 for f in commit:
762 for f in commit:
761 self.ui.note(f + "\n")
763 self.ui.note(f + "\n")
762 try:
764 try:
763 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
765 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
764 new_exec = is_exec(f)
766 new_exec = is_exec(f)
765 new_link = is_link(f)
767 new_link = is_link(f)
766 if ((not changed or changed[-1] != f) and
768 if ((not changed or changed[-1] != f) and
767 m2.get(f) != new[f]):
769 m2.get(f) != new[f]):
768 # mention the file in the changelog if some
770 # mention the file in the changelog if some
769 # flag changed, even if there was no content
771 # flag changed, even if there was no content
770 # change.
772 # change.
771 old_exec = m1.execf(f)
773 old_exec = m1.execf(f)
772 old_link = m1.linkf(f)
774 old_link = m1.linkf(f)
773 if old_exec != new_exec or old_link != new_link:
775 if old_exec != new_exec or old_link != new_link:
774 changed.append(f)
776 changed.append(f)
775 m1.set(f, new_exec, new_link)
777 m1.set(f, new_exec, new_link)
776 if use_dirstate:
778 if use_dirstate:
777 self.dirstate.normal(f)
779 self.dirstate.normal(f)
778
780
779 except (OSError, IOError):
781 except (OSError, IOError):
780 if use_dirstate:
782 if use_dirstate:
781 self.ui.warn(_("trouble committing %s!\n") % f)
783 self.ui.warn(_("trouble committing %s!\n") % f)
782 raise
784 raise
783 else:
785 else:
784 remove.append(f)
786 remove.append(f)
785
787
786 # update manifest
788 # update manifest
787 m1.update(new)
789 m1.update(new)
788 remove.sort()
790 remove.sort()
789 removed = []
791 removed = []
790
792
791 for f in remove:
793 for f in remove:
792 if f in m1:
794 if f in m1:
793 del m1[f]
795 del m1[f]
794 removed.append(f)
796 removed.append(f)
795 elif f in m2:
797 elif f in m2:
796 removed.append(f)
798 removed.append(f)
797 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
799 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
798 (new, removed))
800 (new, removed))
799
801
800 # add changeset
802 # add changeset
801 new = new.keys()
803 new = new.keys()
802 new.sort()
804 new.sort()
803
805
804 user = user or self.ui.username()
806 user = user or self.ui.username()
805 if (not empty_ok and not text) or force_editor:
807 if (not empty_ok and not text) or force_editor:
806 edittext = []
808 edittext = []
807 if text:
809 if text:
808 edittext.append(text)
810 edittext.append(text)
809 edittext.append("")
811 edittext.append("")
810 edittext.append(_("HG: Enter commit message."
812 edittext.append(_("HG: Enter commit message."
811 " Lines beginning with 'HG:' are removed."))
813 " Lines beginning with 'HG:' are removed."))
812 edittext.append("HG: --")
814 edittext.append("HG: --")
813 edittext.append("HG: user: %s" % user)
815 edittext.append("HG: user: %s" % user)
814 if p2 != nullid:
816 if p2 != nullid:
815 edittext.append("HG: branch merge")
817 edittext.append("HG: branch merge")
816 if branchname:
818 if branchname:
817 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
819 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
818 edittext.extend(["HG: changed %s" % f for f in changed])
820 edittext.extend(["HG: changed %s" % f for f in changed])
819 edittext.extend(["HG: removed %s" % f for f in removed])
821 edittext.extend(["HG: removed %s" % f for f in removed])
820 if not changed and not remove:
822 if not changed and not remove:
821 edittext.append("HG: no files changed")
823 edittext.append("HG: no files changed")
822 edittext.append("")
824 edittext.append("")
823 # run editor in the repository root
825 # run editor in the repository root
824 olddir = os.getcwd()
826 olddir = os.getcwd()
825 os.chdir(self.root)
827 os.chdir(self.root)
826 text = self.ui.edit("\n".join(edittext), user)
828 text = self.ui.edit("\n".join(edittext), user)
827 os.chdir(olddir)
829 os.chdir(olddir)
828
830
829 if branchname:
831 if branchname:
830 extra["branch"] = branchname
832 extra["branch"] = branchname
831
833
832 if use_dirstate:
834 if use_dirstate:
833 lines = [line.rstrip() for line in text.rstrip().splitlines()]
835 lines = [line.rstrip() for line in text.rstrip().splitlines()]
834 while lines and not lines[0]:
836 while lines and not lines[0]:
835 del lines[0]
837 del lines[0]
836 if not lines:
838 if not lines:
837 raise util.Abort(_("empty commit message"))
839 raise util.Abort(_("empty commit message"))
838 text = '\n'.join(lines)
840 text = '\n'.join(lines)
839
841
840 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
842 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
841 user, date, extra)
843 user, date, extra)
842 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
844 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
843 parent2=xp2)
845 parent2=xp2)
844 tr.close()
846 tr.close()
845
847
846 if self.branchcache and "branch" in extra:
848 if self.branchcache and "branch" in extra:
847 self.branchcache[util.tolocal(extra["branch"])] = n
849 self.branchcache[util.tolocal(extra["branch"])] = n
848
850
849 if use_dirstate or update_dirstate:
851 if use_dirstate or update_dirstate:
850 self.dirstate.setparents(n)
852 self.dirstate.setparents(n)
851 if use_dirstate:
853 if use_dirstate:
852 for f in removed:
854 for f in removed:
853 self.dirstate.forget(f)
855 self.dirstate.forget(f)
854 valid = 1 # our dirstate updates are complete
856 valid = 1 # our dirstate updates are complete
855
857
856 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
858 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
857 return n
859 return n
858 finally:
860 finally:
859 if not valid: # don't save our updated dirstate
861 if not valid: # don't save our updated dirstate
860 self.dirstate.invalidate()
862 self.dirstate.invalidate()
861 del tr, lock, wlock
863 del tr, lock, wlock
862
864
863 def walk(self, node=None, files=[], match=util.always, badmatch=None):
865 def walk(self, node=None, files=[], match=util.always, badmatch=None):
864 '''
866 '''
865 walk recursively through the directory tree or a given
867 walk recursively through the directory tree or a given
866 changeset, finding all files matched by the match
868 changeset, finding all files matched by the match
867 function
869 function
868
870
869 results are yielded in a tuple (src, filename), where src
871 results are yielded in a tuple (src, filename), where src
870 is one of:
872 is one of:
871 'f' the file was found in the directory tree
873 'f' the file was found in the directory tree
872 'm' the file was only in the dirstate and not in the tree
874 'm' the file was only in the dirstate and not in the tree
873 'b' file was not found and matched badmatch
875 'b' file was not found and matched badmatch
874 '''
876 '''
875
877
876 if node:
878 if node:
877 fdict = dict.fromkeys(files)
879 fdict = dict.fromkeys(files)
878 # for dirstate.walk, files=['.'] means "walk the whole tree".
880 # for dirstate.walk, files=['.'] means "walk the whole tree".
879 # follow that here, too
881 # follow that here, too
880 fdict.pop('.', None)
882 fdict.pop('.', None)
881 mdict = self.manifest.read(self.changelog.read(node)[0])
883 mdict = self.manifest.read(self.changelog.read(node)[0])
882 mfiles = mdict.keys()
884 mfiles = mdict.keys()
883 mfiles.sort()
885 mfiles.sort()
884 for fn in mfiles:
886 for fn in mfiles:
885 for ffn in fdict:
887 for ffn in fdict:
886 # match if the file is the exact name or a directory
888 # match if the file is the exact name or a directory
887 if ffn == fn or fn.startswith("%s/" % ffn):
889 if ffn == fn or fn.startswith("%s/" % ffn):
888 del fdict[ffn]
890 del fdict[ffn]
889 break
891 break
890 if match(fn):
892 if match(fn):
891 yield 'm', fn
893 yield 'm', fn
892 ffiles = fdict.keys()
894 ffiles = fdict.keys()
893 ffiles.sort()
895 ffiles.sort()
894 for fn in ffiles:
896 for fn in ffiles:
895 if badmatch and badmatch(fn):
897 if badmatch and badmatch(fn):
896 if match(fn):
898 if match(fn):
897 yield 'b', fn
899 yield 'b', fn
898 else:
900 else:
899 self.ui.warn(_('%s: No such file in rev %s\n')
901 self.ui.warn(_('%s: No such file in rev %s\n')
900 % (self.pathto(fn), short(node)))
902 % (self.pathto(fn), short(node)))
901 else:
903 else:
902 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
904 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
903 yield src, fn
905 yield src, fn
904
906
905 def status(self, node1=None, node2=None, files=[], match=util.always,
907 def status(self, node1=None, node2=None, files=[], match=util.always,
906 list_ignored=False, list_clean=False):
908 list_ignored=False, list_clean=False):
907 """return status of files between two nodes or node and working directory
909 """return status of files between two nodes or node and working directory
908
910
909 If node1 is None, use the first dirstate parent instead.
911 If node1 is None, use the first dirstate parent instead.
910 If node2 is None, compare node1 with working directory.
912 If node2 is None, compare node1 with working directory.
911 """
913 """
912
914
913 def fcmp(fn, getnode):
915 def fcmp(fn, getnode):
914 t1 = self.wread(fn)
916 t1 = self.wread(fn)
915 return self.file(fn).cmp(getnode(fn), t1)
917 return self.file(fn).cmp(getnode(fn), t1)
916
918
917 def mfmatches(node):
919 def mfmatches(node):
918 change = self.changelog.read(node)
920 change = self.changelog.read(node)
919 mf = self.manifest.read(change[0]).copy()
921 mf = self.manifest.read(change[0]).copy()
920 for fn in mf.keys():
922 for fn in mf.keys():
921 if not match(fn):
923 if not match(fn):
922 del mf[fn]
924 del mf[fn]
923 return mf
925 return mf
924
926
925 modified, added, removed, deleted, unknown = [], [], [], [], []
927 modified, added, removed, deleted, unknown = [], [], [], [], []
926 ignored, clean = [], []
928 ignored, clean = [], []
927
929
928 compareworking = False
930 compareworking = False
929 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
931 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
930 compareworking = True
932 compareworking = True
931
933
932 if not compareworking:
934 if not compareworking:
933 # read the manifest from node1 before the manifest from node2,
935 # read the manifest from node1 before the manifest from node2,
934 # so that we'll hit the manifest cache if we're going through
936 # so that we'll hit the manifest cache if we're going through
935 # all the revisions in parent->child order.
937 # all the revisions in parent->child order.
936 mf1 = mfmatches(node1)
938 mf1 = mfmatches(node1)
937
939
938 # are we comparing the working directory?
940 # are we comparing the working directory?
939 if not node2:
941 if not node2:
940 (lookup, modified, added, removed, deleted, unknown,
942 (lookup, modified, added, removed, deleted, unknown,
941 ignored, clean) = self.dirstate.status(files, match,
943 ignored, clean) = self.dirstate.status(files, match,
942 list_ignored, list_clean)
944 list_ignored, list_clean)
943
945
944 # are we comparing working dir against its parent?
946 # are we comparing working dir against its parent?
945 if compareworking:
947 if compareworking:
946 if lookup:
948 if lookup:
947 fixup = []
949 fixup = []
948 # do a full compare of any files that might have changed
950 # do a full compare of any files that might have changed
949 ctx = self.changectx()
951 ctx = self.changectx()
950 for f in lookup:
952 for f in lookup:
951 if f not in ctx or ctx[f].cmp(self.wread(f)):
953 if f not in ctx or ctx[f].cmp(self.wread(f)):
952 modified.append(f)
954 modified.append(f)
953 else:
955 else:
954 fixup.append(f)
956 fixup.append(f)
955 if list_clean:
957 if list_clean:
956 clean.append(f)
958 clean.append(f)
957
959
958 # update dirstate for files that are actually clean
960 # update dirstate for files that are actually clean
959 if fixup:
961 if fixup:
960 wlock = None
962 wlock = None
961 try:
963 try:
962 try:
964 try:
963 wlock = self.wlock(False)
965 wlock = self.wlock(False)
964 except lock.LockException:
966 except lock.LockException:
965 pass
967 pass
966 if wlock:
968 if wlock:
967 for f in fixup:
969 for f in fixup:
968 self.dirstate.normal(f)
970 self.dirstate.normal(f)
969 finally:
971 finally:
970 del wlock
972 del wlock
971 else:
973 else:
972 # we are comparing working dir against non-parent
974 # we are comparing working dir against non-parent
973 # generate a pseudo-manifest for the working dir
975 # generate a pseudo-manifest for the working dir
974 # XXX: create it in dirstate.py ?
976 # XXX: create it in dirstate.py ?
975 mf2 = mfmatches(self.dirstate.parents()[0])
977 mf2 = mfmatches(self.dirstate.parents()[0])
976 is_exec = util.execfunc(self.root, mf2.execf)
978 is_exec = util.execfunc(self.root, mf2.execf)
977 is_link = util.linkfunc(self.root, mf2.linkf)
979 is_link = util.linkfunc(self.root, mf2.linkf)
978 for f in lookup + modified + added:
980 for f in lookup + modified + added:
979 mf2[f] = ""
981 mf2[f] = ""
980 mf2.set(f, is_exec(f), is_link(f))
982 mf2.set(f, is_exec(f), is_link(f))
981 for f in removed:
983 for f in removed:
982 if f in mf2:
984 if f in mf2:
983 del mf2[f]
985 del mf2[f]
984
986
985 else:
987 else:
986 # we are comparing two revisions
988 # we are comparing two revisions
987 mf2 = mfmatches(node2)
989 mf2 = mfmatches(node2)
988
990
989 if not compareworking:
991 if not compareworking:
990 # flush lists from dirstate before comparing manifests
992 # flush lists from dirstate before comparing manifests
991 modified, added, clean = [], [], []
993 modified, added, clean = [], [], []
992
994
993 # make sure to sort the files so we talk to the disk in a
995 # make sure to sort the files so we talk to the disk in a
994 # reasonable order
996 # reasonable order
995 mf2keys = mf2.keys()
997 mf2keys = mf2.keys()
996 mf2keys.sort()
998 mf2keys.sort()
997 getnode = lambda fn: mf1.get(fn, nullid)
999 getnode = lambda fn: mf1.get(fn, nullid)
998 for fn in mf2keys:
1000 for fn in mf2keys:
999 if mf1.has_key(fn):
1001 if mf1.has_key(fn):
1000 if (mf1.flags(fn) != mf2.flags(fn) or
1002 if (mf1.flags(fn) != mf2.flags(fn) or
1001 (mf1[fn] != mf2[fn] and
1003 (mf1[fn] != mf2[fn] and
1002 (mf2[fn] != "" or fcmp(fn, getnode)))):
1004 (mf2[fn] != "" or fcmp(fn, getnode)))):
1003 modified.append(fn)
1005 modified.append(fn)
1004 elif list_clean:
1006 elif list_clean:
1005 clean.append(fn)
1007 clean.append(fn)
1006 del mf1[fn]
1008 del mf1[fn]
1007 else:
1009 else:
1008 added.append(fn)
1010 added.append(fn)
1009
1011
1010 removed = mf1.keys()
1012 removed = mf1.keys()
1011
1013
1012 # sort and return results:
1014 # sort and return results:
1013 for l in modified, added, removed, deleted, unknown, ignored, clean:
1015 for l in modified, added, removed, deleted, unknown, ignored, clean:
1014 l.sort()
1016 l.sort()
1015 return (modified, added, removed, deleted, unknown, ignored, clean)
1017 return (modified, added, removed, deleted, unknown, ignored, clean)
1016
1018
1017 def add(self, list):
1019 def add(self, list):
1018 wlock = self.wlock()
1020 wlock = self.wlock()
1019 try:
1021 try:
1020 rejected = []
1022 rejected = []
1021 for f in list:
1023 for f in list:
1022 p = self.wjoin(f)
1024 p = self.wjoin(f)
1023 try:
1025 try:
1024 st = os.lstat(p)
1026 st = os.lstat(p)
1025 except:
1027 except:
1026 self.ui.warn(_("%s does not exist!\n") % f)
1028 self.ui.warn(_("%s does not exist!\n") % f)
1027 rejected.append(f)
1029 rejected.append(f)
1028 continue
1030 continue
1029 if st.st_size > 10000000:
1031 if st.st_size > 10000000:
1030 self.ui.warn(_("%s: files over 10MB may cause memory and"
1032 self.ui.warn(_("%s: files over 10MB may cause memory and"
1031 " performance problems\n"
1033 " performance problems\n"
1032 "(use 'hg revert %s' to unadd the file)\n")
1034 "(use 'hg revert %s' to unadd the file)\n")
1033 % (f, f))
1035 % (f, f))
1034 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1036 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1035 self.ui.warn(_("%s not added: only files and symlinks "
1037 self.ui.warn(_("%s not added: only files and symlinks "
1036 "supported currently\n") % f)
1038 "supported currently\n") % f)
1037 rejected.append(p)
1039 rejected.append(p)
1038 elif self.dirstate[f] in 'amn':
1040 elif self.dirstate[f] in 'amn':
1039 self.ui.warn(_("%s already tracked!\n") % f)
1041 self.ui.warn(_("%s already tracked!\n") % f)
1040 elif self.dirstate[f] == 'r':
1042 elif self.dirstate[f] == 'r':
1041 self.dirstate.normallookup(f)
1043 self.dirstate.normallookup(f)
1042 else:
1044 else:
1043 self.dirstate.add(f)
1045 self.dirstate.add(f)
1044 return rejected
1046 return rejected
1045 finally:
1047 finally:
1046 del wlock
1048 del wlock
1047
1049
1048 def forget(self, list):
1050 def forget(self, list):
1049 wlock = self.wlock()
1051 wlock = self.wlock()
1050 try:
1052 try:
1051 for f in list:
1053 for f in list:
1052 if self.dirstate[f] != 'a':
1054 if self.dirstate[f] != 'a':
1053 self.ui.warn(_("%s not added!\n") % f)
1055 self.ui.warn(_("%s not added!\n") % f)
1054 else:
1056 else:
1055 self.dirstate.forget(f)
1057 self.dirstate.forget(f)
1056 finally:
1058 finally:
1057 del wlock
1059 del wlock
1058
1060
1059 def remove(self, list, unlink=False):
1061 def remove(self, list, unlink=False):
1060 wlock = None
1062 wlock = None
1061 try:
1063 try:
1062 if unlink:
1064 if unlink:
1063 for f in list:
1065 for f in list:
1064 try:
1066 try:
1065 util.unlink(self.wjoin(f))
1067 util.unlink(self.wjoin(f))
1066 except OSError, inst:
1068 except OSError, inst:
1067 if inst.errno != errno.ENOENT:
1069 if inst.errno != errno.ENOENT:
1068 raise
1070 raise
1069 wlock = self.wlock()
1071 wlock = self.wlock()
1070 for f in list:
1072 for f in list:
1071 if unlink and os.path.exists(self.wjoin(f)):
1073 if unlink and os.path.exists(self.wjoin(f)):
1072 self.ui.warn(_("%s still exists!\n") % f)
1074 self.ui.warn(_("%s still exists!\n") % f)
1073 elif self.dirstate[f] == 'a':
1075 elif self.dirstate[f] == 'a':
1074 self.dirstate.forget(f)
1076 self.dirstate.forget(f)
1075 elif f not in self.dirstate:
1077 elif f not in self.dirstate:
1076 self.ui.warn(_("%s not tracked!\n") % f)
1078 self.ui.warn(_("%s not tracked!\n") % f)
1077 else:
1079 else:
1078 self.dirstate.remove(f)
1080 self.dirstate.remove(f)
1079 finally:
1081 finally:
1080 del wlock
1082 del wlock
1081
1083
1082 def undelete(self, list):
1084 def undelete(self, list):
1083 wlock = None
1085 wlock = None
1084 try:
1086 try:
1085 manifests = [self.manifest.read(self.changelog.read(p)[0])
1087 manifests = [self.manifest.read(self.changelog.read(p)[0])
1086 for p in self.dirstate.parents() if p != nullid]
1088 for p in self.dirstate.parents() if p != nullid]
1087 wlock = self.wlock()
1089 wlock = self.wlock()
1088 for f in list:
1090 for f in list:
1089 if self.dirstate[f] != 'r':
1091 if self.dirstate[f] != 'r':
1090 self.ui.warn("%s not removed!\n" % f)
1092 self.ui.warn("%s not removed!\n" % f)
1091 else:
1093 else:
1092 m = f in manifests[0] and manifests[0] or manifests[1]
1094 m = f in manifests[0] and manifests[0] or manifests[1]
1093 t = self.file(f).read(m[f])
1095 t = self.file(f).read(m[f])
1094 self.wwrite(f, t, m.flags(f))
1096 self.wwrite(f, t, m.flags(f))
1095 self.dirstate.normal(f)
1097 self.dirstate.normal(f)
1096 finally:
1098 finally:
1097 del wlock
1099 del wlock
1098
1100
def copy(self, source, dest):
    """Record in the dirstate that *dest* is a copy of *source*.

    The destination must already exist in the working directory and be
    a regular file or a symbolic link; otherwise a warning is printed
    and nothing is recorded.
    """
    wlock = None
    try:
        target = self.wjoin(dest)
        present = os.path.exists(target) or os.path.islink(target)
        if not present:
            self.ui.warn(_("%s does not exist!\n") % dest)
        elif not (os.path.isfile(target) or os.path.islink(target)):
            self.ui.warn(_("copy failed: %s is not a file or a "
                           "symbolic link\n") % dest)
        else:
            wlock = self.wlock()
            if dest not in self.dirstate:
                self.dirstate.add(dest)
            self.dirstate.copy(source, dest)
    finally:
        del wlock
1115
1117
def heads(self, start=None):
    """Return the changelog heads, highest revision number first."""
    cl = self.changelog
    # decorate-sort-undecorate: negate the rev so a plain ascending
    # sort yields descending revision order
    decorated = [(-cl.rev(node), node) for node in cl.heads(start)]
    decorated.sort()
    return [node for (negrev, node) in decorated]
1122
1124
def branchheads(self, branch, start=None):
    """Return the list of head nodes of the named *branch*, optionally
    restricted to heads reachable from the *start* node.

    Returns [] when the branch name is unknown.
    """
    branches = self.branchtags()
    if branch not in branches:
        return []
    # The basic algorithm is this:
    #
    # Start from the branch tip since there are no later revisions that can
    # possibly be in this branch, and the tip is a guaranteed head.
    #
    # Remember the tip's parents as the first ancestors, since these by
    # definition are not heads.
    #
    # Step backwards from the branch tip through all the revisions. We are
    # guaranteed by the rules of Mercurial that we will now be visiting the
    # nodes in reverse topological order (children before parents).
    #
    # If a revision is one of the ancestors of a head then we can toss it
    # out of the ancestors set (we've already found it and won't be
    # visiting it again) and put its parents in the ancestors set.
    #
    # Otherwise, if a revision is in the branch it's another head, since it
    # wasn't in the ancestor list of an existing head. So add it to the
    # head list, and add its parents to the ancestor list.
    #
    # If it is not in the branch ignore it.
    #
    # Once we have a list of heads, use nodesbetween to filter out all the
    # heads that cannot be reached from startrev. There may be a more
    # efficient way to do this as part of the previous algorithm.

    # NOTE(review): this shadows the 'set' builtin; util.set is
    # presumably a pre-2.4 compatibility alias for the set type —
    # confirm against util.py.
    set = util.set
    heads = [self.changelog.rev(branches[branch])]
    # Don't care if ancestors contains nullrev or not.
    ancestors = set(self.changelog.parentrevs(heads[0]))
    for rev in xrange(heads[0] - 1, nullrev, -1):
        if rev in ancestors:
            # an ancestor of a known head: not a head itself; chase its
            # parents instead
            ancestors.update(self.changelog.parentrevs(rev))
            ancestors.remove(rev)
        elif self.changectx(rev).branch() == branch:
            # on the branch and not ancestral to any known head: new head
            heads.append(rev)
            ancestors.update(self.changelog.parentrevs(rev))
    heads = [self.changelog.node(rev) for rev in heads]
    if start is not None:
        heads = self.changelog.nodesbetween([start], heads)[2]
    return heads
1168
1170
def branches(self, nodes):
    """For each node, walk down first parents to the end of its linear
    segment and return a (head, root, p1, p2) tuple per node.

    A segment ends at a merge (two parents) or at a root changeset.
    Defaults to the changelog tip when *nodes* is empty.
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    result = []
    for head in nodes:
        node = head
        while True:
            p1, p2 = self.changelog.parents(node)
            # stop on a merge or when we hit a root changeset
            if p2 != nullid or p1 == nullid:
                result.append((head, node, p1, p2))
                break
            node = p1
    return result
1182
1184
def between(self, pairs):
    """For each (top, bottom) pair, sample nodes along the first-parent
    chain from *top* down to *bottom* at exponentially growing spacing
    (after 1, 2, 4, ... steps), as used by the discovery protocol's
    binary search. Returns one list of sampled nodes per pair.
    """
    result = []
    for top, bottom in pairs:
        sample = []
        node = top
        steps = 0
        nextpick = 1
        while node != bottom:
            parent = self.changelog.parents(node)[0]
            if steps == nextpick:
                sample.append(node)
                nextpick *= 2
            node = parent
            steps += 1
        result.append(sample)
    return result
1201
1203
def findincoming(self, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and self.
    Furthermore base will be updated to include the nodes that exists
    in self and remote but no children exists in self and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in self and in remote.
    All the descendants of the list returned are missing in self.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    m = self.changelog.nodemap
    search = []
    fetch = {}
    seen = {}
    seenbranch = {}
    # identity test, not equality: base is either None or a caller dict
    if base is None:
        base = {}

    if not heads:
        heads = remote.heads()

    if self.changelog.tip() == nullid:
        # local repo is empty: everything remote has is missing here
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid]
        return []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return []

    req = dict.fromkeys(unknown)
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            elif n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req[p] = 1
            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                        (reqcnt, " ".join(map(short, r))))
            # batch branch requests ten at a time
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            # was short(f[:4]), which hexlified only the first 4 bytes of
            # the binary node and printed a mangled 8-digit id; pass the
            # whole node so short() emits the standard short hash
            raise repo.RepoError(_("already have changeset ") + short(f))

    if base.keys() == [nullid]:
        if force:
            self.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    self.ui.debug(_("found new changesets starting at ") +
                  " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
1342
1344
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    # identity test, not equality: base is either None or a caller dict.
    # When no base is supplied, compute the common nodes ourselves.
    if base is None:
        base = {}
        self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    remove = base.keys()
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, updated_heads.keys()
    else:
        return subset
1390
1392
def pull(self, remote, heads=None, force=False):
    """Pull changes from *remote* into this repository.

    Returns 0 when nothing was found; otherwise the return value of
    addchangegroup. Pulling only selected heads requires the remote to
    support the changegroupsubset capability.
    """
    lock = self.lock()
    try:
        roots = self.findincoming(remote, heads=heads, force=force)
        if roots == [nullid]:
            self.ui.status(_("requesting all changes\n"))
        if not roots:
            self.ui.status(_("no changes found\n"))
            return 0
        if heads is None:
            cg = remote.changegroup(roots, 'pull')
        elif 'changegroupsubset' not in remote.capabilities:
            raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
        else:
            cg = remote.changegroupsubset(roots, heads, 'pull')
        return self.addchangegroup(cg, 'pull', remote.url())
    finally:
        del lock
1411
1413
def push(self, remote, force=False, revs=None):
    """Push outgoing changesets to *remote*, picking the transport.

    unbundle assumes the local user cannot lock the remote repo (new
    ssh servers, http servers); addchangegroup assumes the local user
    can lock it (local filesystem, old ssh servers).
    """
    if not remote.capable('unbundle'):
        return self.push_addchangegroup(remote, force, revs)
    return self.push_unbundle(remote, force, revs)
1424
1426
def prepush(self, remote, force, revs):
    """Compute the changegroup to push to *remote*.

    Returns (changegroup, remote_heads) on success, or (None, errcode)
    when there is nothing to push or the push is refused because it
    would create new remote heads without --force.
    """
    base = {}
    remote_heads = remote.heads()
    # findincoming fills 'base' with the common nodes; truthy 'inc'
    # means the remote has changes we have not pulled yet
    inc = self.findincoming(remote, base, remote_heads, force=force)

    update, updated_heads = self.findoutgoing(remote, base, remote_heads)
    if revs is not None:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
    else:
        bases, heads = update, self.changelog.heads()

    if not bases:
        self.ui.status(_("no changes found\n"))
        return None, 1
    elif not force:
        # check if we're creating new remote heads
        # to be a remote head after push, node must be either
        # - unknown locally
        # - a local outgoing head descended from update
        # - a remote head that's known locally and not
        #   ancestral to an outgoing head
        warn = 0

        if remote_heads == [nullid]:
            # remote repository is empty: cannot create extra heads
            warn = 0
        elif not revs and len(heads) > len(remote_heads):
            warn = 1
        else:
            newheads = list(heads)
            for r in remote_heads:
                if r in self.changelog.nodemap:
                    # known remote head survives unless one of our
                    # outgoing heads descends from it
                    desc = self.changelog.heads(r, heads)
                    l = [h for h in heads if h in desc]
                    if not l:
                        newheads.append(r)
                else:
                    newheads.append(r)
            if len(newheads) > len(remote_heads):
                warn = 1

        if warn:
            self.ui.warn(_("abort: push creates new remote branches!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return None, 1
    elif inc:
        # forced push with unseen remote changes: warn but proceed
        self.ui.warn(_("note: unsynced remote changes!\n"))


    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
1480
1482
def push_addchangegroup(self, remote, force, revs):
    """Push by locking the remote repo and feeding it a changegroup
    directly (local filesystem, old ssh servers)."""
    lock = remote.lock()
    try:
        cg, extra = self.prepush(remote, force, revs)
        if cg is None:
            # nothing to push or push refused; extra is the return code
            return extra
        return remote.addchangegroup(cg, 'push', self.url())
    finally:
        del lock
1491
1493
def push_unbundle(self, remote, force, revs):
    """Push via the unbundle protocol (new ssh servers, http servers).

    The local repo finds the heads on the server and the revs it must
    push; once the revs are transferred, the server aborts if it finds
    it has different heads (someone else won a commit/push race).
    """
    cg, extra = self.prepush(remote, force, revs)
    if cg is None:
        # nothing to push or push refused; extra is the return code
        return extra
    if force:
        remote_heads = ['force']
    else:
        remote_heads = extra
    return remote.unbundle(cg, remote_heads, 'push')
1504
1506
def changegroupinfo(self, nodes, source):
    """Report how many changesets an outgoing group contains; with
    --debug also list every node. Silent unless verbose or bundling."""
    if source == 'bundle' or self.ui.verbose:
        self.ui.status(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug(_("List of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))
1512
1514
1513 def changegroupsubset(self, bases, heads, source, extranodes=None):
1515 def changegroupsubset(self, bases, heads, source, extranodes=None):
1514 """This function generates a changegroup consisting of all the nodes
1516 """This function generates a changegroup consisting of all the nodes
1515 that are descendents of any of the bases, and ancestors of any of
1517 that are descendents of any of the bases, and ancestors of any of
1516 the heads.
1518 the heads.
1517
1519
1518 It is fairly complex as determining which filenodes and which
1520 It is fairly complex as determining which filenodes and which
1519 manifest nodes need to be included for the changeset to be complete
1521 manifest nodes need to be included for the changeset to be complete
1520 is non-trivial.
1522 is non-trivial.
1521
1523
1522 Another wrinkle is doing the reverse, figuring out which changeset in
1524 Another wrinkle is doing the reverse, figuring out which changeset in
1523 the changegroup a particular filenode or manifestnode belongs to.
1525 the changegroup a particular filenode or manifestnode belongs to.
1524
1526
1525 The caller can specify some nodes that must be included in the
1527 The caller can specify some nodes that must be included in the
1526 changegroup using the extranodes argument. It should be a dict
1528 changegroup using the extranodes argument. It should be a dict
1527 where the keys are the filenames (or 1 for the manifest), and the
1529 where the keys are the filenames (or 1 for the manifest), and the
1528 values are lists of (node, linknode) tuples, where node is a wanted
1530 values are lists of (node, linknode) tuples, where node is a wanted
1529 node and linknode is the changelog node that should be transmitted as
1531 node and linknode is the changelog node that should be transmitted as
1530 the linkrev.
1532 the linkrev.
1531 """
1533 """
1532
1534
1533 self.hook('preoutgoing', throw=True, source=source)
1535 self.hook('preoutgoing', throw=True, source=source)
1534
1536
1535 # Set up some initial variables
1537 # Set up some initial variables
1536 # Make it easy to refer to self.changelog
1538 # Make it easy to refer to self.changelog
1537 cl = self.changelog
1539 cl = self.changelog
1538 # msng is short for missing - compute the list of changesets in this
1540 # msng is short for missing - compute the list of changesets in this
1539 # changegroup.
1541 # changegroup.
1540 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1542 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1541 self.changegroupinfo(msng_cl_lst, source)
1543 self.changegroupinfo(msng_cl_lst, source)
1542 # Some bases may turn out to be superfluous, and some heads may be
1544 # Some bases may turn out to be superfluous, and some heads may be
1543 # too. nodesbetween will return the minimal set of bases and heads
1545 # too. nodesbetween will return the minimal set of bases and heads
1544 # necessary to re-create the changegroup.
1546 # necessary to re-create the changegroup.
1545
1547
1546 # Known heads are the list of heads that it is assumed the recipient
1548 # Known heads are the list of heads that it is assumed the recipient
1547 # of this changegroup will know about.
1549 # of this changegroup will know about.
1548 knownheads = {}
1550 knownheads = {}
1549 # We assume that all parents of bases are known heads.
1551 # We assume that all parents of bases are known heads.
1550 for n in bases:
1552 for n in bases:
1551 for p in cl.parents(n):
1553 for p in cl.parents(n):
1552 if p != nullid:
1554 if p != nullid:
1553 knownheads[p] = 1
1555 knownheads[p] = 1
1554 knownheads = knownheads.keys()
1556 knownheads = knownheads.keys()
1555 if knownheads:
1557 if knownheads:
1556 # Now that we know what heads are known, we can compute which
1558 # Now that we know what heads are known, we can compute which
1557 # changesets are known. The recipient must know about all
1559 # changesets are known. The recipient must know about all
1558 # changesets required to reach the known heads from the null
1560 # changesets required to reach the known heads from the null
1559 # changeset.
1561 # changeset.
1560 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1562 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1561 junk = None
1563 junk = None
1562 # Transform the list into an ersatz set.
1564 # Transform the list into an ersatz set.
1563 has_cl_set = dict.fromkeys(has_cl_set)
1565 has_cl_set = dict.fromkeys(has_cl_set)
1564 else:
1566 else:
1565 # If there were no known heads, the recipient cannot be assumed to
1567 # If there were no known heads, the recipient cannot be assumed to
1566 # know about any changesets.
1568 # know about any changesets.
1567 has_cl_set = {}
1569 has_cl_set = {}
1568
1570
1569 # Make it easy to refer to self.manifest
1571 # Make it easy to refer to self.manifest
1570 mnfst = self.manifest
1572 mnfst = self.manifest
1571 # We don't know which manifests are missing yet
1573 # We don't know which manifests are missing yet
1572 msng_mnfst_set = {}
1574 msng_mnfst_set = {}
1573 # Nor do we know which filenodes are missing.
1575 # Nor do we know which filenodes are missing.
1574 msng_filenode_set = {}
1576 msng_filenode_set = {}
1575
1577
1576 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1578 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1577 junk = None
1579 junk = None
1578
1580
1579 # A changeset always belongs to itself, so the changenode lookup
1581 # A changeset always belongs to itself, so the changenode lookup
1580 # function for a changenode is identity.
1582 # function for a changenode is identity.
1581 def identity(x):
1583 def identity(x):
1582 return x
1584 return x
1583
1585
1584 # A function generating function. Sets up an environment for the
1586 # A function generating function. Sets up an environment for the
1585 # inner function.
1587 # inner function.
1586 def cmp_by_rev_func(revlog):
1588 def cmp_by_rev_func(revlog):
1587 # Compare two nodes by their revision number in the environment's
1589 # Compare two nodes by their revision number in the environment's
1588 # revision history. Since the revision number both represents the
1590 # revision history. Since the revision number both represents the
1589 # most efficient order to read the nodes in, and represents a
1591 # most efficient order to read the nodes in, and represents a
1590 # topological sorting of the nodes, this function is often useful.
1592 # topological sorting of the nodes, this function is often useful.
1591 def cmp_by_rev(a, b):
1593 def cmp_by_rev(a, b):
1592 return cmp(revlog.rev(a), revlog.rev(b))
1594 return cmp(revlog.rev(a), revlog.rev(b))
1593 return cmp_by_rev
1595 return cmp_by_rev
1594
1596
1595 # If we determine that a particular file or manifest node must be a
1597 # If we determine that a particular file or manifest node must be a
1596 # node that the recipient of the changegroup will already have, we can
1598 # node that the recipient of the changegroup will already have, we can
1597 # also assume the recipient will have all the parents. This function
1599 # also assume the recipient will have all the parents. This function
1598 # prunes them from the set of missing nodes.
1600 # prunes them from the set of missing nodes.
1599 def prune_parents(revlog, hasset, msngset):
1601 def prune_parents(revlog, hasset, msngset):
1600 haslst = hasset.keys()
1602 haslst = hasset.keys()
1601 haslst.sort(cmp_by_rev_func(revlog))
1603 haslst.sort(cmp_by_rev_func(revlog))
1602 for node in haslst:
1604 for node in haslst:
1603 parentlst = [p for p in revlog.parents(node) if p != nullid]
1605 parentlst = [p for p in revlog.parents(node) if p != nullid]
1604 while parentlst:
1606 while parentlst:
1605 n = parentlst.pop()
1607 n = parentlst.pop()
1606 if n not in hasset:
1608 if n not in hasset:
1607 hasset[n] = 1
1609 hasset[n] = 1
1608 p = [p for p in revlog.parents(n) if p != nullid]
1610 p = [p for p in revlog.parents(n) if p != nullid]
1609 parentlst.extend(p)
1611 parentlst.extend(p)
1610 for n in hasset:
1612 for n in hasset:
1611 msngset.pop(n, None)
1613 msngset.pop(n, None)
1612
1614
1613 # This is a function generating function used to set up an environment
1615 # This is a function generating function used to set up an environment
1614 # for the inner function to execute in.
1616 # for the inner function to execute in.
1615 def manifest_and_file_collector(changedfileset):
1617 def manifest_and_file_collector(changedfileset):
1616 # This is an information gathering function that gathers
1618 # This is an information gathering function that gathers
1617 # information from each changeset node that goes out as part of
1619 # information from each changeset node that goes out as part of
1618 # the changegroup. The information gathered is a list of which
1620 # the changegroup. The information gathered is a list of which
1619 # manifest nodes are potentially required (the recipient may
1621 # manifest nodes are potentially required (the recipient may
1620 # already have them) and total list of all files which were
1622 # already have them) and total list of all files which were
1621 # changed in any changeset in the changegroup.
1623 # changed in any changeset in the changegroup.
1622 #
1624 #
1623 # We also remember the first changenode we saw any manifest
1625 # We also remember the first changenode we saw any manifest
1624 # referenced by so we can later determine which changenode 'owns'
1626 # referenced by so we can later determine which changenode 'owns'
1625 # the manifest.
1627 # the manifest.
1626 def collect_manifests_and_files(clnode):
1628 def collect_manifests_and_files(clnode):
1627 c = cl.read(clnode)
1629 c = cl.read(clnode)
1628 for f in c[3]:
1630 for f in c[3]:
1629 # This is to make sure we only have one instance of each
1631 # This is to make sure we only have one instance of each
1630 # filename string for each filename.
1632 # filename string for each filename.
1631 changedfileset.setdefault(f, f)
1633 changedfileset.setdefault(f, f)
1632 msng_mnfst_set.setdefault(c[0], clnode)
1634 msng_mnfst_set.setdefault(c[0], clnode)
1633 return collect_manifests_and_files
1635 return collect_manifests_and_files
1634
1636
1635 # Figure out which manifest nodes (of the ones we think might be part
1637 # Figure out which manifest nodes (of the ones we think might be part
1636 # of the changegroup) the recipient must know about and remove them
1638 # of the changegroup) the recipient must know about and remove them
1637 # from the changegroup.
1639 # from the changegroup.
1638 def prune_manifests():
1640 def prune_manifests():
1639 has_mnfst_set = {}
1641 has_mnfst_set = {}
1640 for n in msng_mnfst_set:
1642 for n in msng_mnfst_set:
1641 # If a 'missing' manifest thinks it belongs to a changenode
1643 # If a 'missing' manifest thinks it belongs to a changenode
1642 # the recipient is assumed to have, obviously the recipient
1644 # the recipient is assumed to have, obviously the recipient
1643 # must have that manifest.
1645 # must have that manifest.
1644 linknode = cl.node(mnfst.linkrev(n))
1646 linknode = cl.node(mnfst.linkrev(n))
1645 if linknode in has_cl_set:
1647 if linknode in has_cl_set:
1646 has_mnfst_set[n] = 1
1648 has_mnfst_set[n] = 1
1647 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1649 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1648
1650
1649 # Use the information collected in collect_manifests_and_files to say
1651 # Use the information collected in collect_manifests_and_files to say
1650 # which changenode any manifestnode belongs to.
1652 # which changenode any manifestnode belongs to.
1651 def lookup_manifest_link(mnfstnode):
1653 def lookup_manifest_link(mnfstnode):
1652 return msng_mnfst_set[mnfstnode]
1654 return msng_mnfst_set[mnfstnode]
1653
1655
1654 # A function generating function that sets up the initial environment
1656 # A function generating function that sets up the initial environment
1655 # the inner function.
1657 # the inner function.
1656 def filenode_collector(changedfiles):
1658 def filenode_collector(changedfiles):
1657 next_rev = [0]
1659 next_rev = [0]
1658 # This gathers information from each manifestnode included in the
1660 # This gathers information from each manifestnode included in the
1659 # changegroup about which filenodes the manifest node references
1661 # changegroup about which filenodes the manifest node references
1660 # so we can include those in the changegroup too.
1662 # so we can include those in the changegroup too.
1661 #
1663 #
1662 # It also remembers which changenode each filenode belongs to. It
1664 # It also remembers which changenode each filenode belongs to. It
1663 # does this by assuming the a filenode belongs to the changenode
1665 # does this by assuming the a filenode belongs to the changenode
1664 # the first manifest that references it belongs to.
1666 # the first manifest that references it belongs to.
1665 def collect_msng_filenodes(mnfstnode):
1667 def collect_msng_filenodes(mnfstnode):
1666 r = mnfst.rev(mnfstnode)
1668 r = mnfst.rev(mnfstnode)
1667 if r == next_rev[0]:
1669 if r == next_rev[0]:
1668 # If the last rev we looked at was the one just previous,
1670 # If the last rev we looked at was the one just previous,
1669 # we only need to see a diff.
1671 # we only need to see a diff.
1670 deltamf = mnfst.readdelta(mnfstnode)
1672 deltamf = mnfst.readdelta(mnfstnode)
1671 # For each line in the delta
1673 # For each line in the delta
1672 for f, fnode in deltamf.items():
1674 for f, fnode in deltamf.items():
1673 f = changedfiles.get(f, None)
1675 f = changedfiles.get(f, None)
1674 # And if the file is in the list of files we care
1676 # And if the file is in the list of files we care
1675 # about.
1677 # about.
1676 if f is not None:
1678 if f is not None:
1677 # Get the changenode this manifest belongs to
1679 # Get the changenode this manifest belongs to
1678 clnode = msng_mnfst_set[mnfstnode]
1680 clnode = msng_mnfst_set[mnfstnode]
1679 # Create the set of filenodes for the file if
1681 # Create the set of filenodes for the file if
1680 # there isn't one already.
1682 # there isn't one already.
1681 ndset = msng_filenode_set.setdefault(f, {})
1683 ndset = msng_filenode_set.setdefault(f, {})
1682 # And set the filenode's changelog node to the
1684 # And set the filenode's changelog node to the
1683 # manifest's if it hasn't been set already.
1685 # manifest's if it hasn't been set already.
1684 ndset.setdefault(fnode, clnode)
1686 ndset.setdefault(fnode, clnode)
1685 else:
1687 else:
1686 # Otherwise we need a full manifest.
1688 # Otherwise we need a full manifest.
1687 m = mnfst.read(mnfstnode)
1689 m = mnfst.read(mnfstnode)
1688 # For every file in we care about.
1690 # For every file in we care about.
1689 for f in changedfiles:
1691 for f in changedfiles:
1690 fnode = m.get(f, None)
1692 fnode = m.get(f, None)
1691 # If it's in the manifest
1693 # If it's in the manifest
1692 if fnode is not None:
1694 if fnode is not None:
1693 # See comments above.
1695 # See comments above.
1694 clnode = msng_mnfst_set[mnfstnode]
1696 clnode = msng_mnfst_set[mnfstnode]
1695 ndset = msng_filenode_set.setdefault(f, {})
1697 ndset = msng_filenode_set.setdefault(f, {})
1696 ndset.setdefault(fnode, clnode)
1698 ndset.setdefault(fnode, clnode)
1697 # Remember the revision we hope to see next.
1699 # Remember the revision we hope to see next.
1698 next_rev[0] = r + 1
1700 next_rev[0] = r + 1
1699 return collect_msng_filenodes
1701 return collect_msng_filenodes
1700
1702
1701 # We have a list of filenodes we think we need for a file, lets remove
1703 # We have a list of filenodes we think we need for a file, lets remove
1702 # all those we now the recipient must have.
1704 # all those we now the recipient must have.
1703 def prune_filenodes(f, filerevlog):
1705 def prune_filenodes(f, filerevlog):
1704 msngset = msng_filenode_set[f]
1706 msngset = msng_filenode_set[f]
1705 hasset = {}
1707 hasset = {}
1706 # If a 'missing' filenode thinks it belongs to a changenode we
1708 # If a 'missing' filenode thinks it belongs to a changenode we
1707 # assume the recipient must have, then the recipient must have
1709 # assume the recipient must have, then the recipient must have
1708 # that filenode.
1710 # that filenode.
1709 for n in msngset:
1711 for n in msngset:
1710 clnode = cl.node(filerevlog.linkrev(n))
1712 clnode = cl.node(filerevlog.linkrev(n))
1711 if clnode in has_cl_set:
1713 if clnode in has_cl_set:
1712 hasset[n] = 1
1714 hasset[n] = 1
1713 prune_parents(filerevlog, hasset, msngset)
1715 prune_parents(filerevlog, hasset, msngset)
1714
1716
1715 # A function generator function that sets up the a context for the
1717 # A function generator function that sets up the a context for the
1716 # inner function.
1718 # inner function.
1717 def lookup_filenode_link_func(fname):
1719 def lookup_filenode_link_func(fname):
1718 msngset = msng_filenode_set[fname]
1720 msngset = msng_filenode_set[fname]
1719 # Lookup the changenode the filenode belongs to.
1721 # Lookup the changenode the filenode belongs to.
1720 def lookup_filenode_link(fnode):
1722 def lookup_filenode_link(fnode):
1721 return msngset[fnode]
1723 return msngset[fnode]
1722 return lookup_filenode_link
1724 return lookup_filenode_link
1723
1725
1724 # Add the nodes that were explicitly requested.
1726 # Add the nodes that were explicitly requested.
1725 def add_extra_nodes(name, nodes):
1727 def add_extra_nodes(name, nodes):
1726 if not extranodes or name not in extranodes:
1728 if not extranodes or name not in extranodes:
1727 return
1729 return
1728
1730
1729 for node, linknode in extranodes[name]:
1731 for node, linknode in extranodes[name]:
1730 if node not in nodes:
1732 if node not in nodes:
1731 nodes[node] = linknode
1733 nodes[node] = linknode
1732
1734
1733 # Now that we have all theses utility functions to help out and
1735 # Now that we have all theses utility functions to help out and
1734 # logically divide up the task, generate the group.
1736 # logically divide up the task, generate the group.
1735 def gengroup():
1737 def gengroup():
1736 # The set of changed files starts empty.
1738 # The set of changed files starts empty.
1737 changedfiles = {}
1739 changedfiles = {}
1738 # Create a changenode group generator that will call our functions
1740 # Create a changenode group generator that will call our functions
1739 # back to lookup the owning changenode and collect information.
1741 # back to lookup the owning changenode and collect information.
1740 group = cl.group(msng_cl_lst, identity,
1742 group = cl.group(msng_cl_lst, identity,
1741 manifest_and_file_collector(changedfiles))
1743 manifest_and_file_collector(changedfiles))
1742 for chnk in group:
1744 for chnk in group:
1743 yield chnk
1745 yield chnk
1744
1746
1745 # The list of manifests has been collected by the generator
1747 # The list of manifests has been collected by the generator
1746 # calling our functions back.
1748 # calling our functions back.
1747 prune_manifests()
1749 prune_manifests()
1748 add_extra_nodes(1, msng_mnfst_set)
1750 add_extra_nodes(1, msng_mnfst_set)
1749 msng_mnfst_lst = msng_mnfst_set.keys()
1751 msng_mnfst_lst = msng_mnfst_set.keys()
1750 # Sort the manifestnodes by revision number.
1752 # Sort the manifestnodes by revision number.
1751 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1753 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1752 # Create a generator for the manifestnodes that calls our lookup
1754 # Create a generator for the manifestnodes that calls our lookup
1753 # and data collection functions back.
1755 # and data collection functions back.
1754 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1756 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1755 filenode_collector(changedfiles))
1757 filenode_collector(changedfiles))
1756 for chnk in group:
1758 for chnk in group:
1757 yield chnk
1759 yield chnk
1758
1760
1759 # These are no longer needed, dereference and toss the memory for
1761 # These are no longer needed, dereference and toss the memory for
1760 # them.
1762 # them.
1761 msng_mnfst_lst = None
1763 msng_mnfst_lst = None
1762 msng_mnfst_set.clear()
1764 msng_mnfst_set.clear()
1763
1765
1764 if extranodes:
1766 if extranodes:
1765 for fname in extranodes:
1767 for fname in extranodes:
1766 if isinstance(fname, int):
1768 if isinstance(fname, int):
1767 continue
1769 continue
1768 add_extra_nodes(fname,
1770 add_extra_nodes(fname,
1769 msng_filenode_set.setdefault(fname, {}))
1771 msng_filenode_set.setdefault(fname, {}))
1770 changedfiles[fname] = 1
1772 changedfiles[fname] = 1
1771 changedfiles = changedfiles.keys()
1773 changedfiles = changedfiles.keys()
1772 changedfiles.sort()
1774 changedfiles.sort()
1773 # Go through all our files in order sorted by name.
1775 # Go through all our files in order sorted by name.
1774 for fname in changedfiles:
1776 for fname in changedfiles:
1775 filerevlog = self.file(fname)
1777 filerevlog = self.file(fname)
1776 if filerevlog.count() == 0:
1778 if filerevlog.count() == 0:
1777 raise util.Abort(_("empty or missing revlog for %s") % fname)
1779 raise util.Abort(_("empty or missing revlog for %s") % fname)
1778 # Toss out the filenodes that the recipient isn't really
1780 # Toss out the filenodes that the recipient isn't really
1779 # missing.
1781 # missing.
1780 if msng_filenode_set.has_key(fname):
1782 if msng_filenode_set.has_key(fname):
1781 prune_filenodes(fname, filerevlog)
1783 prune_filenodes(fname, filerevlog)
1782 msng_filenode_lst = msng_filenode_set[fname].keys()
1784 msng_filenode_lst = msng_filenode_set[fname].keys()
1783 else:
1785 else:
1784 msng_filenode_lst = []
1786 msng_filenode_lst = []
1785 # If any filenodes are left, generate the group for them,
1787 # If any filenodes are left, generate the group for them,
1786 # otherwise don't bother.
1788 # otherwise don't bother.
1787 if len(msng_filenode_lst) > 0:
1789 if len(msng_filenode_lst) > 0:
1788 yield changegroup.chunkheader(len(fname))
1790 yield changegroup.chunkheader(len(fname))
1789 yield fname
1791 yield fname
1790 # Sort the filenodes by their revision #
1792 # Sort the filenodes by their revision #
1791 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1793 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1792 # Create a group generator and only pass in a changenode
1794 # Create a group generator and only pass in a changenode
1793 # lookup function as we need to collect no information
1795 # lookup function as we need to collect no information
1794 # from filenodes.
1796 # from filenodes.
1795 group = filerevlog.group(msng_filenode_lst,
1797 group = filerevlog.group(msng_filenode_lst,
1796 lookup_filenode_link_func(fname))
1798 lookup_filenode_link_func(fname))
1797 for chnk in group:
1799 for chnk in group:
1798 yield chnk
1800 yield chnk
1799 if msng_filenode_set.has_key(fname):
1801 if msng_filenode_set.has_key(fname):
1800 # Don't need this anymore, toss it to free memory.
1802 # Don't need this anymore, toss it to free memory.
1801 del msng_filenode_set[fname]
1803 del msng_filenode_set[fname]
1802 # Signal that no more groups are left.
1804 # Signal that no more groups are left.
1803 yield changegroup.closechunk()
1805 yield changegroup.closechunk()
1804
1806
1805 if msng_cl_lst:
1807 if msng_cl_lst:
1806 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1808 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1807
1809
1808 return util.chunkbuffer(gengroup())
1810 return util.chunkbuffer(gengroup())
1809
1811
1810 def changegroup(self, basenodes, source):
1812 def changegroup(self, basenodes, source):
1811 """Generate a changegroup of all nodes that we have that a recipient
1813 """Generate a changegroup of all nodes that we have that a recipient
1812 doesn't.
1814 doesn't.
1813
1815
1814 This is much easier than the previous function as we can assume that
1816 This is much easier than the previous function as we can assume that
1815 the recipient has any changenode we aren't sending them."""
1817 the recipient has any changenode we aren't sending them."""
1816
1818
1817 self.hook('preoutgoing', throw=True, source=source)
1819 self.hook('preoutgoing', throw=True, source=source)
1818
1820
1819 cl = self.changelog
1821 cl = self.changelog
1820 nodes = cl.nodesbetween(basenodes, None)[0]
1822 nodes = cl.nodesbetween(basenodes, None)[0]
1821 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1823 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1822 self.changegroupinfo(nodes, source)
1824 self.changegroupinfo(nodes, source)
1823
1825
1824 def identity(x):
1826 def identity(x):
1825 return x
1827 return x
1826
1828
1827 def gennodelst(revlog):
1829 def gennodelst(revlog):
1828 for r in xrange(0, revlog.count()):
1830 for r in xrange(0, revlog.count()):
1829 n = revlog.node(r)
1831 n = revlog.node(r)
1830 if revlog.linkrev(n) in revset:
1832 if revlog.linkrev(n) in revset:
1831 yield n
1833 yield n
1832
1834
1833 def changed_file_collector(changedfileset):
1835 def changed_file_collector(changedfileset):
1834 def collect_changed_files(clnode):
1836 def collect_changed_files(clnode):
1835 c = cl.read(clnode)
1837 c = cl.read(clnode)
1836 for fname in c[3]:
1838 for fname in c[3]:
1837 changedfileset[fname] = 1
1839 changedfileset[fname] = 1
1838 return collect_changed_files
1840 return collect_changed_files
1839
1841
1840 def lookuprevlink_func(revlog):
1842 def lookuprevlink_func(revlog):
1841 def lookuprevlink(n):
1843 def lookuprevlink(n):
1842 return cl.node(revlog.linkrev(n))
1844 return cl.node(revlog.linkrev(n))
1843 return lookuprevlink
1845 return lookuprevlink
1844
1846
1845 def gengroup():
1847 def gengroup():
1846 # construct a list of all changed files
1848 # construct a list of all changed files
1847 changedfiles = {}
1849 changedfiles = {}
1848
1850
1849 for chnk in cl.group(nodes, identity,
1851 for chnk in cl.group(nodes, identity,
1850 changed_file_collector(changedfiles)):
1852 changed_file_collector(changedfiles)):
1851 yield chnk
1853 yield chnk
1852 changedfiles = changedfiles.keys()
1854 changedfiles = changedfiles.keys()
1853 changedfiles.sort()
1855 changedfiles.sort()
1854
1856
1855 mnfst = self.manifest
1857 mnfst = self.manifest
1856 nodeiter = gennodelst(mnfst)
1858 nodeiter = gennodelst(mnfst)
1857 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1859 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1858 yield chnk
1860 yield chnk
1859
1861
1860 for fname in changedfiles:
1862 for fname in changedfiles:
1861 filerevlog = self.file(fname)
1863 filerevlog = self.file(fname)
1862 if filerevlog.count() == 0:
1864 if filerevlog.count() == 0:
1863 raise util.Abort(_("empty or missing revlog for %s") % fname)
1865 raise util.Abort(_("empty or missing revlog for %s") % fname)
1864 nodeiter = gennodelst(filerevlog)
1866 nodeiter = gennodelst(filerevlog)
1865 nodeiter = list(nodeiter)
1867 nodeiter = list(nodeiter)
1866 if nodeiter:
1868 if nodeiter:
1867 yield changegroup.chunkheader(len(fname))
1869 yield changegroup.chunkheader(len(fname))
1868 yield fname
1870 yield fname
1869 lookup = lookuprevlink_func(filerevlog)
1871 lookup = lookuprevlink_func(filerevlog)
1870 for chnk in filerevlog.group(nodeiter, lookup):
1872 for chnk in filerevlog.group(nodeiter, lookup):
1871 yield chnk
1873 yield chnk
1872
1874
1873 yield changegroup.closechunk()
1875 yield changegroup.closechunk()
1874
1876
1875 if nodes:
1877 if nodes:
1876 self.hook('outgoing', node=hex(nodes[0]), source=source)
1878 self.hook('outgoing', node=hex(nodes[0]), source=source)
1877
1879
1878 return util.chunkbuffer(gengroup())
1880 return util.chunkbuffer(gengroup())
1879
1881
1880 def addchangegroup(self, source, srctype, url, emptyok=False):
1882 def addchangegroup(self, source, srctype, url, emptyok=False):
1881 """add changegroup to repo.
1883 """add changegroup to repo.
1882
1884
1883 return values:
1885 return values:
1884 - nothing changed or no source: 0
1886 - nothing changed or no source: 0
1885 - more heads than before: 1+added heads (2..n)
1887 - more heads than before: 1+added heads (2..n)
1886 - less heads than before: -1-removed heads (-2..-n)
1888 - less heads than before: -1-removed heads (-2..-n)
1887 - number of heads stays the same: 1
1889 - number of heads stays the same: 1
1888 """
1890 """
1889 def csmap(x):
1891 def csmap(x):
1890 self.ui.debug(_("add changeset %s\n") % short(x))
1892 self.ui.debug(_("add changeset %s\n") % short(x))
1891 return cl.count()
1893 return cl.count()
1892
1894
1893 def revmap(x):
1895 def revmap(x):
1894 return cl.rev(x)
1896 return cl.rev(x)
1895
1897
1896 if not source:
1898 if not source:
1897 return 0
1899 return 0
1898
1900
1899 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1901 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1900
1902
1901 changesets = files = revisions = 0
1903 changesets = files = revisions = 0
1902
1904
1903 # write changelog data to temp files so concurrent readers will not see
1905 # write changelog data to temp files so concurrent readers will not see
1904 # inconsistent view
1906 # inconsistent view
1905 cl = self.changelog
1907 cl = self.changelog
1906 cl.delayupdate()
1908 cl.delayupdate()
1907 oldheads = len(cl.heads())
1909 oldheads = len(cl.heads())
1908
1910
1909 tr = self.transaction()
1911 tr = self.transaction()
1910 try:
1912 try:
1911 trp = weakref.proxy(tr)
1913 trp = weakref.proxy(tr)
1912 # pull off the changeset group
1914 # pull off the changeset group
1913 self.ui.status(_("adding changesets\n"))
1915 self.ui.status(_("adding changesets\n"))
1914 cor = cl.count() - 1
1916 cor = cl.count() - 1
1915 chunkiter = changegroup.chunkiter(source)
1917 chunkiter = changegroup.chunkiter(source)
1916 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1918 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1917 raise util.Abort(_("received changelog group is empty"))
1919 raise util.Abort(_("received changelog group is empty"))
1918 cnr = cl.count() - 1
1920 cnr = cl.count() - 1
1919 changesets = cnr - cor
1921 changesets = cnr - cor
1920
1922
1921 # pull off the manifest group
1923 # pull off the manifest group
1922 self.ui.status(_("adding manifests\n"))
1924 self.ui.status(_("adding manifests\n"))
1923 chunkiter = changegroup.chunkiter(source)
1925 chunkiter = changegroup.chunkiter(source)
1924 # no need to check for empty manifest group here:
1926 # no need to check for empty manifest group here:
1925 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1927 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1926 # no new manifest will be created and the manifest group will
1928 # no new manifest will be created and the manifest group will
1927 # be empty during the pull
1929 # be empty during the pull
1928 self.manifest.addgroup(chunkiter, revmap, trp)
1930 self.manifest.addgroup(chunkiter, revmap, trp)
1929
1931
1930 # process the files
1932 # process the files
1931 self.ui.status(_("adding file changes\n"))
1933 self.ui.status(_("adding file changes\n"))
1932 while 1:
1934 while 1:
1933 f = changegroup.getchunk(source)
1935 f = changegroup.getchunk(source)
1934 if not f:
1936 if not f:
1935 break
1937 break
1936 self.ui.debug(_("adding %s revisions\n") % f)
1938 self.ui.debug(_("adding %s revisions\n") % f)
1937 fl = self.file(f)
1939 fl = self.file(f)
1938 o = fl.count()
1940 o = fl.count()
1939 chunkiter = changegroup.chunkiter(source)
1941 chunkiter = changegroup.chunkiter(source)
1940 if fl.addgroup(chunkiter, revmap, trp) is None:
1942 if fl.addgroup(chunkiter, revmap, trp) is None:
1941 raise util.Abort(_("received file revlog group is empty"))
1943 raise util.Abort(_("received file revlog group is empty"))
1942 revisions += fl.count() - o
1944 revisions += fl.count() - o
1943 files += 1
1945 files += 1
1944
1946
1945 # make changelog see real files again
1947 # make changelog see real files again
1946 cl.finalize(trp)
1948 cl.finalize(trp)
1947
1949
1948 newheads = len(self.changelog.heads())
1950 newheads = len(self.changelog.heads())
1949 heads = ""
1951 heads = ""
1950 if oldheads and newheads != oldheads:
1952 if oldheads and newheads != oldheads:
1951 heads = _(" (%+d heads)") % (newheads - oldheads)
1953 heads = _(" (%+d heads)") % (newheads - oldheads)
1952
1954
1953 self.ui.status(_("added %d changesets"
1955 self.ui.status(_("added %d changesets"
1954 " with %d changes to %d files%s\n")
1956 " with %d changes to %d files%s\n")
1955 % (changesets, revisions, files, heads))
1957 % (changesets, revisions, files, heads))
1956
1958
1957 if changesets > 0:
1959 if changesets > 0:
1958 self.hook('pretxnchangegroup', throw=True,
1960 self.hook('pretxnchangegroup', throw=True,
1959 node=hex(self.changelog.node(cor+1)), source=srctype,
1961 node=hex(self.changelog.node(cor+1)), source=srctype,
1960 url=url)
1962 url=url)
1961
1963
1962 tr.close()
1964 tr.close()
1963 finally:
1965 finally:
1964 del tr
1966 del tr
1965
1967
1966 if changesets > 0:
1968 if changesets > 0:
1967 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1969 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1968 source=srctype, url=url)
1970 source=srctype, url=url)
1969
1971
1970 for i in xrange(cor + 1, cnr + 1):
1972 for i in xrange(cor + 1, cnr + 1):
1971 self.hook("incoming", node=hex(self.changelog.node(i)),
1973 self.hook("incoming", node=hex(self.changelog.node(i)),
1972 source=srctype, url=url)
1974 source=srctype, url=url)
1973
1975
1974 # never return 0 here:
1976 # never return 0 here:
1975 if newheads < oldheads:
1977 if newheads < oldheads:
1976 return newheads - oldheads - 1
1978 return newheads - oldheads - 1
1977 else:
1979 else:
1978 return newheads - oldheads + 1
1980 return newheads - oldheads + 1
1979
1981
1980
1982
1981 def stream_in(self, remote):
1983 def stream_in(self, remote):
1982 fp = remote.stream_out()
1984 fp = remote.stream_out()
1983 l = fp.readline()
1985 l = fp.readline()
1984 try:
1986 try:
1985 resp = int(l)
1987 resp = int(l)
1986 except ValueError:
1988 except ValueError:
1987 raise util.UnexpectedOutput(
1989 raise util.UnexpectedOutput(
1988 _('Unexpected response from remote server:'), l)
1990 _('Unexpected response from remote server:'), l)
1989 if resp == 1:
1991 if resp == 1:
1990 raise util.Abort(_('operation forbidden by server'))
1992 raise util.Abort(_('operation forbidden by server'))
1991 elif resp == 2:
1993 elif resp == 2:
1992 raise util.Abort(_('locking the remote repository failed'))
1994 raise util.Abort(_('locking the remote repository failed'))
1993 elif resp != 0:
1995 elif resp != 0:
1994 raise util.Abort(_('the server sent an unknown error code'))
1996 raise util.Abort(_('the server sent an unknown error code'))
1995 self.ui.status(_('streaming all changes\n'))
1997 self.ui.status(_('streaming all changes\n'))
1996 l = fp.readline()
1998 l = fp.readline()
1997 try:
1999 try:
1998 total_files, total_bytes = map(int, l.split(' ', 1))
2000 total_files, total_bytes = map(int, l.split(' ', 1))
1999 except ValueError, TypeError:
2001 except ValueError, TypeError:
2000 raise util.UnexpectedOutput(
2002 raise util.UnexpectedOutput(
2001 _('Unexpected response from remote server:'), l)
2003 _('Unexpected response from remote server:'), l)
2002 self.ui.status(_('%d files to transfer, %s of data\n') %
2004 self.ui.status(_('%d files to transfer, %s of data\n') %
2003 (total_files, util.bytecount(total_bytes)))
2005 (total_files, util.bytecount(total_bytes)))
2004 start = time.time()
2006 start = time.time()
2005 for i in xrange(total_files):
2007 for i in xrange(total_files):
2006 # XXX doesn't support '\n' or '\r' in filenames
2008 # XXX doesn't support '\n' or '\r' in filenames
2007 l = fp.readline()
2009 l = fp.readline()
2008 try:
2010 try:
2009 name, size = l.split('\0', 1)
2011 name, size = l.split('\0', 1)
2010 size = int(size)
2012 size = int(size)
2011 except ValueError, TypeError:
2013 except ValueError, TypeError:
2012 raise util.UnexpectedOutput(
2014 raise util.UnexpectedOutput(
2013 _('Unexpected response from remote server:'), l)
2015 _('Unexpected response from remote server:'), l)
2014 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2016 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2015 ofp = self.sopener(name, 'w')
2017 ofp = self.sopener(name, 'w')
2016 for chunk in util.filechunkiter(fp, limit=size):
2018 for chunk in util.filechunkiter(fp, limit=size):
2017 ofp.write(chunk)
2019 ofp.write(chunk)
2018 ofp.close()
2020 ofp.close()
2019 elapsed = time.time() - start
2021 elapsed = time.time() - start
2020 if elapsed <= 0:
2022 if elapsed <= 0:
2021 elapsed = 0.001
2023 elapsed = 0.001
2022 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2024 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2023 (util.bytecount(total_bytes), elapsed,
2025 (util.bytecount(total_bytes), elapsed,
2024 util.bytecount(total_bytes / elapsed)))
2026 util.bytecount(total_bytes / elapsed)))
2025 self.invalidate()
2027 self.invalidate()
2026 return len(self.heads()) + 1
2028 return len(self.heads()) + 1
2027
2029
2028 def clone(self, remote, heads=[], stream=False):
2030 def clone(self, remote, heads=[], stream=False):
2029 '''clone remote repository.
2031 '''clone remote repository.
2030
2032
2031 keyword arguments:
2033 keyword arguments:
2032 heads: list of revs to clone (forces use of pull)
2034 heads: list of revs to clone (forces use of pull)
2033 stream: use streaming clone if possible'''
2035 stream: use streaming clone if possible'''
2034
2036
2035 # now, all clients that can request uncompressed clones can
2037 # now, all clients that can request uncompressed clones can
2036 # read repo formats supported by all servers that can serve
2038 # read repo formats supported by all servers that can serve
2037 # them.
2039 # them.
2038
2040
2039 # if revlog format changes, client will have to check version
2041 # if revlog format changes, client will have to check version
2040 # and format flags on "stream" capability, and use
2042 # and format flags on "stream" capability, and use
2041 # uncompressed only if compatible.
2043 # uncompressed only if compatible.
2042
2044
2043 if stream and not heads and remote.capable('stream'):
2045 if stream and not heads and remote.capable('stream'):
2044 return self.stream_in(remote)
2046 return self.stream_in(remote)
2045 return self.pull(remote, heads)
2047 return self.pull(remote, heads)
2046
2048
2047 # used to avoid circular references so destructors work
2049 # used to avoid circular references so destructors work
2048 def aftertrans(files):
2050 def aftertrans(files):
2049 renamefiles = [tuple(t) for t in files]
2051 renamefiles = [tuple(t) for t in files]
2050 def a():
2052 def a():
2051 for src, dest in renamefiles:
2053 for src, dest in renamefiles:
2052 util.rename(src, dest)
2054 util.rename(src, dest)
2053 return a
2055 return a
2054
2056
2055 def instance(ui, path, create):
2057 def instance(ui, path, create):
2056 return localrepository(ui, util.drop_scheme('file', path), create)
2058 return localrepository(ui, util.drop_scheme('file', path), create)
2057
2059
2058 def islocal(path):
2060 def islocal(path):
2059 return True
2061 return True
@@ -1,101 +1,101 b''
1 % initial svn import
1 % initial svn import
2 Adding projA/trunk
2 Adding projA/trunk
3 Adding projA/branches
3 Adding projA/branches
4 Adding projA/tags
4 Adding projA/tags
5
5
6 Committed revision 1.
6 Committed revision 1.
7 % update svn repository
7 % update svn repository
8 A A/trunk
8 A A/trunk
9 A A/branches
9 A A/branches
10 A A/tags
10 A A/tags
11 Checked out revision 1.
11 Checked out revision 1.
12 A trunk/letter.txt
12 A trunk/letter.txt
13 A trunk/letter2.txt
13 A trunk/letter2.txt
14 A trunk/letter3.txt
14 A trunk/letter3.txt
15 Adding trunk/letter.txt
15 Adding trunk/letter.txt
16 Adding trunk/letter2.txt
16 Adding trunk/letter2.txt
17 Adding trunk/letter3.txt
17 Adding trunk/letter3.txt
18 Transmitting file data ...
18 Transmitting file data ...
19 Committed revision 2.
19 Committed revision 2.
20 % branch to old letters
20 % branch to old letters
21 A branches/old
21 A branches/old
22 D branches/old/letter3.txt
22 D branches/old/letter3.txt
23 Adding branches/old
23 Adding branches/old
24 Adding branches/old/letter.txt
24 Adding branches/old/letter.txt
25 Adding branches/old/letter2.txt
25 Adding branches/old/letter2.txt
26 Deleting branches/old/letter3.txt
26 Deleting branches/old/letter3.txt
27
27
28 Committed revision 3.
28 Committed revision 3.
29 At revision 3.
29 At revision 3.
30 % update trunk
30 % update trunk
31 Sending trunk/letter.txt
31 Sending trunk/letter.txt
32 Transmitting file data .
32 Transmitting file data .
33 Committed revision 4.
33 Committed revision 4.
34 % update old branch
34 % update old branch
35 Sending branches/old/letter2.txt
35 Sending branches/old/letter2.txt
36 Transmitting file data .
36 Transmitting file data .
37 Committed revision 5.
37 Committed revision 5.
38 % create a cross-branch revision
38 % create a cross-branch revision
39 A branches/old/letter3.txt
39 A branches/old/letter3.txt
40 D trunk/letter2.txt
40 D trunk/letter2.txt
41 Adding branches/old/letter3.txt
41 Adding branches/old/letter3.txt
42 Deleting trunk/letter2.txt
42 Deleting trunk/letter2.txt
43 Transmitting file data .
43 Transmitting file data .
44 Committed revision 6.
44 Committed revision 6.
45 % update old branch again
45 % update old branch again
46 Sending branches/old/letter2.txt
46 Sending branches/old/letter2.txt
47 Transmitting file data .
47 Transmitting file data .
48 Committed revision 7.
48 Committed revision 7.
49 % update trunk again
49 % update trunk again
50 Sending trunk/letter.txt
50 Sending trunk/letter.txt
51 Transmitting file data .
51 Transmitting file data .
52 Committed revision 8.
52 Committed revision 8.
53 % convert trunk and branches
53 % convert trunk and branches
54 initializing destination A-hg repository
54 initializing destination A-hg repository
55 scanning source...
55 scanning source...
56 sorting...
56 sorting...
57 converting...
57 converting...
58 8 init projA
58 8 init projA
59 7 hello
59 7 hello
60 6 branch trunk, remove letter3
60 6 branch trunk, remove letter3
61 5 change letter
61 5 change letter
62 4 change letter2
62 4 change letter2
63 3 move and update letter3.txt
63 3 move and update letter3.txt
64 2 move and update letter3.txt
64 2 move and update letter3.txt
65 1 change letter2 again
65 1 change letter2 again
66 0 last change to letter
66 0 last change to letter
67 % branch again from a converted revision
67 % branch again from a converted revision
68 Checked out revision 1.
68 Checked out revision 1.
69 A branches/old2
69 A branches/old2
70 Adding branches/old2
70 Adding branches/old2
71
71
72 Committed revision 9.
72 Committed revision 9.
73 % convert again
73 % convert again
74 scanning source...
74 scanning source...
75 sorting...
75 sorting...
76 converting...
76 converting...
77 0 branch trunk@1 into old2
77 0 branch trunk@1 into old2
78 o 9 branch trunk@1 into old2 files:
78 o 9 branch trunk@1 into old2 files:
79 |
79 |
80 | o 8 last change to letter files: letter.txt
80 | o 8 last change to letter files: letter.txt
81 | |
81 | |
82 | | o 7 change letter2 again files: letter2.txt
82 | | o 7 change letter2 again files: letter2.txt
83 | | |
83 | | |
84 | o | 6 move and update letter3.txt files: letter2.txt
84 | o | 6 move and update letter3.txt files: letter2.txt
85 | | |
85 | | |
86 | | o 5 move and update letter3.txt files: letter3.txt
86 | | o 5 move and update letter3.txt files: letter3.txt
87 | | |
87 | | |
88 | | o 4 change letter2 files: letter2.txt
88 | | o 4 change letter2 files: letter2.txt
89 | | |
89 | | |
90 | o | 3 change letter files: letter.txt
90 | o | 3 change letter files: letter.txt
91 | | |
91 | | |
92 +---o 2 branch trunk, remove letter3 files: letter.txt letter.txt letter2.txt letter2.txt
92 +---o 2 branch trunk, remove letter3 files: letter.txt letter2.txt
93 | |
93 | |
94 | o 1 hello files: letter.txt letter2.txt letter3.txt
94 | o 1 hello files: letter.txt letter2.txt letter3.txt
95 |/
95 |/
96 o 0 init projA files:
96 o 0 init projA files:
97
97
98 old2 9:
98 old2 9:
99 default 8:
99 default 8:
100 old 7:
100 old 7:
101 tip
101 tip
@@ -1,177 +1,180 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 "$TESTDIR/hghave" svn svn-bindings || exit 80
3 "$TESTDIR/hghave" svn svn-bindings || exit 80
4
4
5 fix_path()
5 fix_path()
6 {
6 {
7 tr '\\' /
7 tr '\\' /
8 }
8 }
9
9
10 echo "[extensions]" >> $HGRCPATH
10 echo "[extensions]" >> $HGRCPATH
11 echo "convert = " >> $HGRCPATH
11 echo "convert = " >> $HGRCPATH
12
12
13 svnadmin create svn-repo
13 svnadmin create svn-repo
14
14
15 echo % initial svn import
15 echo % initial svn import
16 mkdir t
16 mkdir t
17 cd t
17 cd t
18 echo a > a
18 echo a > a
19 cd ..
19 cd ..
20
20
21 svnpath=`pwd | fix_path`
21 svnpath=`pwd | fix_path`
22 # SVN wants all paths to start with a slash. Unfortunately,
22 # SVN wants all paths to start with a slash. Unfortunately,
23 # Windows ones don't. Handle that.
23 # Windows ones don't. Handle that.
24 expr $svnpath : "\/" > /dev/null
24 expr $svnpath : "\/" > /dev/null
25 if [ $? -ne 0 ]; then
25 if [ $? -ne 0 ]; then
26 svnpath='/'$svnpath
26 svnpath='/'$svnpath
27 fi
27 fi
28
28
29 svnurl=file://$svnpath/svn-repo/trunk/test
29 svnurl=file://$svnpath/svn-repo/trunk/test
30 svn import -m init t $svnurl | fix_path
30 svn import -m init t $svnurl | fix_path
31
31
32 echo % update svn repository
32 echo % update svn repository
33 svn co $svnurl t2 | fix_path
33 svn co $svnurl t2 | fix_path
34 cd t2
34 cd t2
35 echo b >> a
35 echo b >> a
36 echo b > b
36 echo b > b
37 svn add b
37 svn add b
38 svn ci -m changea
38 svn ci -m changea
39 cd ..
39 cd ..
40
40
41 echo % convert to hg once
41 echo % convert to hg once
42 hg convert $svnurl
42 hg convert $svnurl
43
43
44 echo % update svn repository again
44 echo % update svn repository again
45 cd t2
45 cd t2
46 echo c >> a
46 echo c >> a
47 echo c >> b
47 echo c >> b
48 svn ci -m changeb
48 svn ci -m changeb
49 cd ..
49 cd ..
50
50
51 echo % test incremental conversion
51 echo % test incremental conversion
52 hg convert $svnurl
52 hg convert $svnurl
53
53
54 echo % test filemap
54 echo % test filemap
55 echo 'include b' > filemap
55 echo 'include b' > filemap
56 hg convert --filemap filemap $svnurl fmap
56 hg convert --filemap filemap $svnurl fmap
57 echo '[extensions]' >> $HGRCPATH
57 echo '[extensions]' >> $HGRCPATH
58 echo 'hgext.graphlog =' >> $HGRCPATH
58 echo 'hgext.graphlog =' >> $HGRCPATH
59 hg glog -R fmap --template '#rev# #desc|firstline# files: #files#\n'
59 hg glog -R fmap --template '#rev# #desc|firstline# files: #files#\n'
60
60
61 echo % test stop revision
61 echo % test stop revision
62 hg convert --rev 1 $svnurl stoprev
62 hg convert --rev 1 $svnurl stoprev
63 # Check convert_revision extra-records.
63 # Check convert_revision extra-records.
64 # This is also the only place testing more than one extra field
64 # This is also the only place testing more than one extra field
65 # in a revision.
65 # in a revision.
66 hg --cwd stoprev tip --debug | grep extra | sed 's/=.*/=/'
66 hg --cwd stoprev tip --debug | grep extra | sed 's/=.*/=/'
67
67
68 ########################################
68 ########################################
69
69
70 echo "# now tests that it works with trunk/branches/tags layout"
70 echo "# now tests that it works with trunk/branches/tags layout"
71 echo
71 echo
72 echo % initial svn import
72 echo % initial svn import
73 mkdir projA
73 mkdir projA
74 cd projA
74 cd projA
75 mkdir trunk
75 mkdir trunk
76 mkdir branches
76 mkdir branches
77 mkdir tags
77 mkdir tags
78 cd ..
78 cd ..
79
79
80 svnurl=file://$svnpath/svn-repo/projA
80 svnurl=file://$svnpath/svn-repo/projA
81 svn import -m "init projA" projA $svnurl | fix_path
81 svn import -m "init projA" projA $svnurl | fix_path
82
82
83
83
84 echo % update svn repository
84 echo % update svn repository
85 svn co $svnurl/trunk A | fix_path
85 svn co $svnurl/trunk A | fix_path
86 cd A
86 cd A
87 echo hello > letter.txt
87 echo hello > letter.txt
88 svn add letter.txt
88 svn add letter.txt
89 svn ci -m hello
89 svn ci -m hello
90
90
91 echo world >> letter.txt
91 echo world >> letter.txt
92 svn ci -m world
92 svn ci -m world
93
93
94 svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1
94 svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1
95
95
96 echo 'nice day today!' >> letter.txt
96 echo 'nice day today!' >> letter.txt
97 svn ci -m "nice day"
97 svn ci -m "nice day"
98 cd ..
98 cd ..
99
99
100 echo % convert to hg once
100 echo % convert to hg once
101 hg convert $svnurl A-hg
101 hg convert $svnurl A-hg
102
102
103 echo % update svn repository again
103 echo % update svn repository again
104 cd A
104 cd A
105 echo "see second letter" >> letter.txt
105 echo "see second letter" >> letter.txt
106 echo "nice to meet you" > letter2.txt
106 # Put it in a subdirectory to test duplicate file records
107 svn add letter2.txt
107 # from svn source (issue 714)
108 mkdir todo
109 echo "nice to meet you" > todo/letter2.txt
110 svn add todo
108 svn ci -m "second letter"
111 svn ci -m "second letter"
109
112
110 svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2
113 svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2
111
114
112 echo "blah-blah-blah" >> letter2.txt
115 echo "blah-blah-blah" >> todo/letter2.txt
113 svn ci -m "work in progress"
116 svn ci -m "work in progress"
114 cd ..
117 cd ..
115
118
116 echo % test incremental conversion
119 echo % test incremental conversion
117 hg convert $svnurl A-hg
120 hg convert $svnurl A-hg
118
121
119 cd A-hg
122 cd A-hg
120 hg glog --template '#rev# #desc|firstline# files: #files#\n'
123 hg glog --template '#rev# #desc|firstline# files: #files#\n'
121 hg tags -q
124 hg tags -q
122 cd ..
125 cd ..
123
126
124 ########################################
127 ########################################
125
128
126 echo "# now tests that it works with trunk/tags layout, but no branches yet"
129 echo "# now tests that it works with trunk/tags layout, but no branches yet"
127 echo
130 echo
128 echo % initial svn import
131 echo % initial svn import
129 mkdir projB
132 mkdir projB
130 cd projB
133 cd projB
131 mkdir trunk
134 mkdir trunk
132 mkdir tags
135 mkdir tags
133 cd ..
136 cd ..
134
137
135 svnurl=file://$svnpath/svn-repo/projB
138 svnurl=file://$svnpath/svn-repo/projB
136 svn import -m "init projB" projB $svnurl | fix_path
139 svn import -m "init projB" projB $svnurl | fix_path
137
140
138
141
139 echo % update svn repository
142 echo % update svn repository
140 svn co $svnurl/trunk B | fix_path
143 svn co $svnurl/trunk B | fix_path
141 cd B
144 cd B
142 echo hello > letter.txt
145 echo hello > letter.txt
143 svn add letter.txt
146 svn add letter.txt
144 svn ci -m hello
147 svn ci -m hello
145
148
146 echo world >> letter.txt
149 echo world >> letter.txt
147 svn ci -m world
150 svn ci -m world
148
151
149 svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1
152 svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1
150
153
151 echo 'nice day today!' >> letter.txt
154 echo 'nice day today!' >> letter.txt
152 svn ci -m "nice day"
155 svn ci -m "nice day"
153 cd ..
156 cd ..
154
157
155 echo % convert to hg once
158 echo % convert to hg once
156 hg convert $svnurl B-hg
159 hg convert $svnurl B-hg
157
160
158 echo % update svn repository again
161 echo % update svn repository again
159 cd B
162 cd B
160 echo "see second letter" >> letter.txt
163 echo "see second letter" >> letter.txt
161 echo "nice to meet you" > letter2.txt
164 echo "nice to meet you" > letter2.txt
162 svn add letter2.txt
165 svn add letter2.txt
163 svn ci -m "second letter"
166 svn ci -m "second letter"
164
167
165 svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2
168 svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2
166
169
167 echo "blah-blah-blah" >> letter2.txt
170 echo "blah-blah-blah" >> letter2.txt
168 svn ci -m "work in progress"
171 svn ci -m "work in progress"
169 cd ..
172 cd ..
170
173
171 echo % test incremental conversion
174 echo % test incremental conversion
172 hg convert $svnurl B-hg
175 hg convert $svnurl B-hg
173
176
174 cd B-hg
177 cd B-hg
175 hg glog --template '#rev# #desc|firstline# files: #files#\n'
178 hg glog --template '#rev# #desc|firstline# files: #files#\n'
176 hg tags -q
179 hg tags -q
177 cd ..
180 cd ..
@@ -1,188 +1,190 b''
1 % initial svn import
1 % initial svn import
2 Adding t/a
2 Adding t/a
3
3
4 Committed revision 1.
4 Committed revision 1.
5 % update svn repository
5 % update svn repository
6 A t2/a
6 A t2/a
7 Checked out revision 1.
7 Checked out revision 1.
8 A b
8 A b
9 Sending a
9 Sending a
10 Adding b
10 Adding b
11 Transmitting file data ..
11 Transmitting file data ..
12 Committed revision 2.
12 Committed revision 2.
13 % convert to hg once
13 % convert to hg once
14 assuming destination test-hg
14 assuming destination test-hg
15 initializing destination test-hg repository
15 initializing destination test-hg repository
16 scanning source...
16 scanning source...
17 sorting...
17 sorting...
18 converting...
18 converting...
19 1 init
19 1 init
20 0 changea
20 0 changea
21 % update svn repository again
21 % update svn repository again
22 Sending a
22 Sending a
23 Sending b
23 Sending b
24 Transmitting file data ..
24 Transmitting file data ..
25 Committed revision 3.
25 Committed revision 3.
26 % test incremental conversion
26 % test incremental conversion
27 assuming destination test-hg
27 assuming destination test-hg
28 scanning source...
28 scanning source...
29 sorting...
29 sorting...
30 converting...
30 converting...
31 0 changeb
31 0 changeb
32 % test filemap
32 % test filemap
33 initializing destination fmap repository
33 initializing destination fmap repository
34 scanning source...
34 scanning source...
35 sorting...
35 sorting...
36 converting...
36 converting...
37 2 init
37 2 init
38 1 changea
38 1 changea
39 0 changeb
39 0 changeb
40 o 1 changeb files: b
40 o 1 changeb files: b
41 |
41 |
42 o 0 changea files: b
42 o 0 changea files: b
43
43
44 % test stop revision
44 % test stop revision
45 initializing destination stoprev repository
45 initializing destination stoprev repository
46 scanning source...
46 scanning source...
47 sorting...
47 sorting...
48 converting...
48 converting...
49 0 init
49 0 init
50 extra: branch=
50 extra: branch=
51 extra: convert_revision=
51 extra: convert_revision=
52 # now tests that it works with trunk/branches/tags layout
52 # now tests that it works with trunk/branches/tags layout
53
53
54 % initial svn import
54 % initial svn import
55 Adding projA/trunk
55 Adding projA/trunk
56 Adding projA/branches
56 Adding projA/branches
57 Adding projA/tags
57 Adding projA/tags
58
58
59 Committed revision 4.
59 Committed revision 4.
60 % update svn repository
60 % update svn repository
61 Checked out revision 4.
61 Checked out revision 4.
62 A letter.txt
62 A letter.txt
63 Adding letter.txt
63 Adding letter.txt
64 Transmitting file data .
64 Transmitting file data .
65 Committed revision 5.
65 Committed revision 5.
66 Sending letter.txt
66 Sending letter.txt
67 Transmitting file data .
67 Transmitting file data .
68 Committed revision 6.
68 Committed revision 6.
69
69
70 Committed revision 7.
70 Committed revision 7.
71 Sending letter.txt
71 Sending letter.txt
72 Transmitting file data .
72 Transmitting file data .
73 Committed revision 8.
73 Committed revision 8.
74 % convert to hg once
74 % convert to hg once
75 initializing destination A-hg repository
75 initializing destination A-hg repository
76 scanning source...
76 scanning source...
77 sorting...
77 sorting...
78 converting...
78 converting...
79 3 init projA
79 3 init projA
80 2 hello
80 2 hello
81 1 world
81 1 world
82 0 nice day
82 0 nice day
83 updating tags
83 updating tags
84 % update svn repository again
84 % update svn repository again
85 A letter2.txt
85 A todo
86 A todo/letter2.txt
86 Sending letter.txt
87 Sending letter.txt
87 Adding letter2.txt
88 Adding todo
89 Adding todo/letter2.txt
88 Transmitting file data ..
90 Transmitting file data ..
89 Committed revision 9.
91 Committed revision 9.
90
92
91 Committed revision 10.
93 Committed revision 10.
92 Sending letter2.txt
94 Sending todo/letter2.txt
93 Transmitting file data .
95 Transmitting file data .
94 Committed revision 11.
96 Committed revision 11.
95 % test incremental conversion
97 % test incremental conversion
96 scanning source...
98 scanning source...
97 sorting...
99 sorting...
98 converting...
100 converting...
99 1 second letter
101 1 second letter
100 0 work in progress
102 0 work in progress
101 updating tags
103 updating tags
102 o 7 update tags files: .hgtags
104 o 7 update tags files: .hgtags
103 |
105 |
104 o 6 work in progress files: letter2.txt
106 o 6 work in progress files: todo/letter2.txt
105 |
107 |
106 o 5 second letter files: letter.txt letter2.txt
108 o 5 second letter files: letter.txt todo/letter2.txt
107 |
109 |
108 o 4 update tags files: .hgtags
110 o 4 update tags files: .hgtags
109 |
111 |
110 o 3 nice day files: letter.txt
112 o 3 nice day files: letter.txt
111 |
113 |
112 o 2 world files: letter.txt
114 o 2 world files: letter.txt
113 |
115 |
114 o 1 hello files: letter.txt
116 o 1 hello files: letter.txt
115 |
117 |
116 o 0 init projA files:
118 o 0 init projA files:
117
119
118 tip
120 tip
119 v0.2
121 v0.2
120 v0.1
122 v0.1
121 # now tests that it works with trunk/tags layout, but no branches yet
123 # now tests that it works with trunk/tags layout, but no branches yet
122
124
123 % initial svn import
125 % initial svn import
124 Adding projB/trunk
126 Adding projB/trunk
125 Adding projB/tags
127 Adding projB/tags
126
128
127 Committed revision 12.
129 Committed revision 12.
128 % update svn repository
130 % update svn repository
129 Checked out revision 12.
131 Checked out revision 12.
130 A letter.txt
132 A letter.txt
131 Adding letter.txt
133 Adding letter.txt
132 Transmitting file data .
134 Transmitting file data .
133 Committed revision 13.
135 Committed revision 13.
134 Sending letter.txt
136 Sending letter.txt
135 Transmitting file data .
137 Transmitting file data .
136 Committed revision 14.
138 Committed revision 14.
137
139
138 Committed revision 15.
140 Committed revision 15.
139 Sending letter.txt
141 Sending letter.txt
140 Transmitting file data .
142 Transmitting file data .
141 Committed revision 16.
143 Committed revision 16.
142 % convert to hg once
144 % convert to hg once
143 initializing destination B-hg repository
145 initializing destination B-hg repository
144 scanning source...
146 scanning source...
145 sorting...
147 sorting...
146 converting...
148 converting...
147 3 init projB
149 3 init projB
148 2 hello
150 2 hello
149 1 world
151 1 world
150 0 nice day
152 0 nice day
151 updating tags
153 updating tags
152 % update svn repository again
154 % update svn repository again
153 A letter2.txt
155 A letter2.txt
154 Sending letter.txt
156 Sending letter.txt
155 Adding letter2.txt
157 Adding letter2.txt
156 Transmitting file data ..
158 Transmitting file data ..
157 Committed revision 17.
159 Committed revision 17.
158
160
159 Committed revision 18.
161 Committed revision 18.
160 Sending letter2.txt
162 Sending letter2.txt
161 Transmitting file data .
163 Transmitting file data .
162 Committed revision 19.
164 Committed revision 19.
163 % test incremental conversion
165 % test incremental conversion
164 scanning source...
166 scanning source...
165 sorting...
167 sorting...
166 converting...
168 converting...
167 1 second letter
169 1 second letter
168 0 work in progress
170 0 work in progress
169 updating tags
171 updating tags
170 o 7 update tags files: .hgtags
172 o 7 update tags files: .hgtags
171 |
173 |
172 o 6 work in progress files: letter2.txt
174 o 6 work in progress files: letter2.txt
173 |
175 |
174 o 5 second letter files: letter.txt letter2.txt
176 o 5 second letter files: letter.txt letter2.txt
175 |
177 |
176 o 4 update tags files: .hgtags
178 o 4 update tags files: .hgtags
177 |
179 |
178 o 3 nice day files: letter.txt
180 o 3 nice day files: letter.txt
179 |
181 |
180 o 2 world files: letter.txt
182 o 2 world files: letter.txt
181 |
183 |
182 o 1 hello files: letter.txt
184 o 1 hello files: letter.txt
183 |
185 |
184 o 0 init projB files:
186 o 0 init projB files:
185
187
186 tip
188 tip
187 v0.2
189 v0.2
188 v0.1
190 v0.1
General Comments 0
You need to be logged in to leave comments. Login now