##// END OF EJS Templates
clone: use cancopy
Matt Mackall -
r6315:5c96a4bc default
parent child Browse files
Show More
@@ -1,288 +1,291
1 """
1 """
2 bundlerepo.py - repository class for viewing uncompressed bundles
2 bundlerepo.py - repository class for viewing uncompressed bundles
3
3
4 This provides a read-only repository interface to bundles as if
4 This provides a read-only repository interface to bundles as if
5 they were part of the actual repository.
5 they were part of the actual repository.
6
6
7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import hex, nullid, short
13 from node import hex, nullid, short
14 from i18n import _
14 from i18n import _
15 import changegroup, util, os, struct, bz2, tempfile, shutil, mdiff
15 import changegroup, util, os, struct, bz2, tempfile, shutil, mdiff
16 import repo, localrepo, changelog, manifest, filelog, revlog
16 import repo, localrepo, changelog, manifest, filelog, revlog
17
17
18 class bundlerevlog(revlog.revlog):
18 class bundlerevlog(revlog.revlog):
19 def __init__(self, opener, indexfile, bundlefile,
19 def __init__(self, opener, indexfile, bundlefile,
20 linkmapper=None):
20 linkmapper=None):
21 # How it works:
21 # How it works:
22 # to retrieve a revision, we need to know the offset of
22 # to retrieve a revision, we need to know the offset of
23 # the revision in the bundlefile (an opened file).
23 # the revision in the bundlefile (an opened file).
24 #
24 #
25 # We store this offset in the index (start), to differentiate a
25 # We store this offset in the index (start), to differentiate a
26 # rev in the bundle and from a rev in the revlog, we check
26 # rev in the bundle and from a rev in the revlog, we check
27 # len(index[r]). If the tuple is bigger than 7, it is a bundle
27 # len(index[r]). If the tuple is bigger than 7, it is a bundle
28 # (it is bigger since we store the node to which the delta is)
28 # (it is bigger since we store the node to which the delta is)
29 #
29 #
30 revlog.revlog.__init__(self, opener, indexfile)
30 revlog.revlog.__init__(self, opener, indexfile)
31 self.bundlefile = bundlefile
31 self.bundlefile = bundlefile
32 self.basemap = {}
32 self.basemap = {}
33 def chunkpositer():
33 def chunkpositer():
34 for chunk in changegroup.chunkiter(bundlefile):
34 for chunk in changegroup.chunkiter(bundlefile):
35 pos = bundlefile.tell()
35 pos = bundlefile.tell()
36 yield chunk, pos - len(chunk)
36 yield chunk, pos - len(chunk)
37 n = self.count()
37 n = self.count()
38 prev = None
38 prev = None
39 for chunk, start in chunkpositer():
39 for chunk, start in chunkpositer():
40 size = len(chunk)
40 size = len(chunk)
41 if size < 80:
41 if size < 80:
42 raise util.Abort("invalid changegroup")
42 raise util.Abort("invalid changegroup")
43 start += 80
43 start += 80
44 size -= 80
44 size -= 80
45 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
45 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
46 if node in self.nodemap:
46 if node in self.nodemap:
47 prev = node
47 prev = node
48 continue
48 continue
49 for p in (p1, p2):
49 for p in (p1, p2):
50 if not p in self.nodemap:
50 if not p in self.nodemap:
51 raise revlog.LookupError(p1, self.indexfile,
51 raise revlog.LookupError(p1, self.indexfile,
52 _("unknown parent"))
52 _("unknown parent"))
53 if linkmapper is None:
53 if linkmapper is None:
54 link = n
54 link = n
55 else:
55 else:
56 link = linkmapper(cs)
56 link = linkmapper(cs)
57
57
58 if not prev:
58 if not prev:
59 prev = p1
59 prev = p1
60 # start, size, full unc. size, base (unused), link, p1, p2, node
60 # start, size, full unc. size, base (unused), link, p1, p2, node
61 e = (revlog.offset_type(start, 0), size, -1, -1, link,
61 e = (revlog.offset_type(start, 0), size, -1, -1, link,
62 self.rev(p1), self.rev(p2), node)
62 self.rev(p1), self.rev(p2), node)
63 self.basemap[n] = prev
63 self.basemap[n] = prev
64 self.index.insert(-1, e)
64 self.index.insert(-1, e)
65 self.nodemap[node] = n
65 self.nodemap[node] = n
66 prev = node
66 prev = node
67 n += 1
67 n += 1
68
68
69 def bundle(self, rev):
69 def bundle(self, rev):
70 """is rev from the bundle"""
70 """is rev from the bundle"""
71 if rev < 0:
71 if rev < 0:
72 return False
72 return False
73 return rev in self.basemap
73 return rev in self.basemap
74 def bundlebase(self, rev): return self.basemap[rev]
74 def bundlebase(self, rev): return self.basemap[rev]
75 def chunk(self, rev, df=None, cachelen=4096):
75 def chunk(self, rev, df=None, cachelen=4096):
76 # Warning: in case of bundle, the diff is against bundlebase,
76 # Warning: in case of bundle, the diff is against bundlebase,
77 # not against rev - 1
77 # not against rev - 1
78 # XXX: could use some caching
78 # XXX: could use some caching
79 if not self.bundle(rev):
79 if not self.bundle(rev):
80 return revlog.revlog.chunk(self, rev, df)
80 return revlog.revlog.chunk(self, rev, df)
81 self.bundlefile.seek(self.start(rev))
81 self.bundlefile.seek(self.start(rev))
82 return self.bundlefile.read(self.length(rev))
82 return self.bundlefile.read(self.length(rev))
83
83
84 def revdiff(self, rev1, rev2):
84 def revdiff(self, rev1, rev2):
85 """return or calculate a delta between two revisions"""
85 """return or calculate a delta between two revisions"""
86 if self.bundle(rev1) and self.bundle(rev2):
86 if self.bundle(rev1) and self.bundle(rev2):
87 # hot path for bundle
87 # hot path for bundle
88 revb = self.rev(self.bundlebase(rev2))
88 revb = self.rev(self.bundlebase(rev2))
89 if revb == rev1:
89 if revb == rev1:
90 return self.chunk(rev2)
90 return self.chunk(rev2)
91 elif not self.bundle(rev1) and not self.bundle(rev2):
91 elif not self.bundle(rev1) and not self.bundle(rev2):
92 return revlog.revlog.revdiff(self, rev1, rev2)
92 return revlog.revlog.revdiff(self, rev1, rev2)
93
93
94 return mdiff.textdiff(self.revision(self.node(rev1)),
94 return mdiff.textdiff(self.revision(self.node(rev1)),
95 self.revision(self.node(rev2)))
95 self.revision(self.node(rev2)))
96
96
97 def revision(self, node):
97 def revision(self, node):
98 """return an uncompressed revision of a given"""
98 """return an uncompressed revision of a given"""
99 if node == nullid: return ""
99 if node == nullid: return ""
100
100
101 text = None
101 text = None
102 chain = []
102 chain = []
103 iter_node = node
103 iter_node = node
104 rev = self.rev(iter_node)
104 rev = self.rev(iter_node)
105 # reconstruct the revision if it is from a changegroup
105 # reconstruct the revision if it is from a changegroup
106 while self.bundle(rev):
106 while self.bundle(rev):
107 if self._cache and self._cache[0] == iter_node:
107 if self._cache and self._cache[0] == iter_node:
108 text = self._cache[2]
108 text = self._cache[2]
109 break
109 break
110 chain.append(rev)
110 chain.append(rev)
111 iter_node = self.bundlebase(rev)
111 iter_node = self.bundlebase(rev)
112 rev = self.rev(iter_node)
112 rev = self.rev(iter_node)
113 if text is None:
113 if text is None:
114 text = revlog.revlog.revision(self, iter_node)
114 text = revlog.revlog.revision(self, iter_node)
115
115
116 while chain:
116 while chain:
117 delta = self.chunk(chain.pop())
117 delta = self.chunk(chain.pop())
118 text = mdiff.patches(text, [delta])
118 text = mdiff.patches(text, [delta])
119
119
120 p1, p2 = self.parents(node)
120 p1, p2 = self.parents(node)
121 if node != revlog.hash(text, p1, p2):
121 if node != revlog.hash(text, p1, p2):
122 raise revlog.RevlogError(_("integrity check failed on %s:%d")
122 raise revlog.RevlogError(_("integrity check failed on %s:%d")
123 % (self.datafile, self.rev(node)))
123 % (self.datafile, self.rev(node)))
124
124
125 self._cache = (node, self.rev(node), text)
125 self._cache = (node, self.rev(node), text)
126 return text
126 return text
127
127
128 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
128 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
129 raise NotImplementedError
129 raise NotImplementedError
130 def addgroup(self, revs, linkmapper, transaction, unique=0):
130 def addgroup(self, revs, linkmapper, transaction, unique=0):
131 raise NotImplementedError
131 raise NotImplementedError
132 def strip(self, rev, minlink):
132 def strip(self, rev, minlink):
133 raise NotImplementedError
133 raise NotImplementedError
134 def checksize(self):
134 def checksize(self):
135 raise NotImplementedError
135 raise NotImplementedError
136
136
class bundlechangelog(bundlerevlog, changelog.changelog):
    """changelog that also exposes revisions stored in a bundle file."""
    def __init__(self, opener, bundlefile):
        # init the on-disk changelog first so self.indexfile is set,
        # then overlay the bundle's chunks via bundlerevlog
        changelog.changelog.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
141
141
class bundlemanifest(bundlerevlog, manifest.manifest):
    """manifest that also exposes revisions stored in a bundle file."""
    def __init__(self, opener, bundlefile, linkmapper):
        # init the on-disk manifest first so self.indexfile is set;
        # linkmapper maps changeset nodes to link revisions
        manifest.manifest.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
                              linkmapper)
147
147
class bundlefilelog(bundlerevlog, filelog.filelog):
    """filelog that also exposes revisions stored in a bundle file."""
    def __init__(self, opener, path, bundlefile, linkmapper):
        # init the on-disk filelog for `path` first so self.indexfile
        # is set; linkmapper maps changeset nodes to link revisions
        filelog.filelog.__init__(self, opener, path)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
                              linkmapper)
153
153
class bundlerepository(localrepo.localrepository):
    """A read-only repository view over a bundle file.

    If `path` names a real repository, the bundle is overlaid on it;
    otherwise a throw-away parent repository is created in a temp dir.
    """
    def __init__(self, ui, path, bundlename):
        self._tempparent = None
        try:
            localrepo.localrepository.__init__(self, ui, path)
        except repo.RepoError:
            # no usable repo at `path`: back the bundle with an empty
            # temporary repository (removed again in __del__)
            self._tempparent = tempfile.mkdtemp()
            tmprepo = localrepo.instance(ui,self._tempparent,1)
            localrepo.localrepository.__init__(self, ui, self._tempparent)

        if path:
            self._url = 'bundle:' + path + '+' + bundlename
        else:
            self._url = 'bundle:' + bundlename

        self.tempfile = None
        self.bundlefile = open(bundlename, "rb")
        header = self.bundlefile.read(6)
        if not header.startswith("HG"):
            raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
        elif not header.startswith("HG10"):
            raise util.Abort(_("%s: unknown bundle version") % bundlename)
        elif header == "HG10BZ":
            # bz2-compressed bundle: decompress to a temporary
            # uncompressed (HG10UN) file and read from that instead
            fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                            suffix=".hg10un", dir=self.path)
            self.tempfile = temp
            fptemp = os.fdopen(fdtemp, 'wb')
            def generator(f):
                zd = bz2.BZ2Decompressor()
                # the stored stream omits the "BZ" magic; feed it first
                zd.decompress("BZ")
                for chunk in f:
                    yield zd.decompress(chunk)
            gen = generator(util.filechunkiter(self.bundlefile, 4096))

            try:
                fptemp.write("HG10UN")
                for chunk in gen:
                    fptemp.write(chunk)
            finally:
                fptemp.close()
                self.bundlefile.close()

            self.bundlefile = open(self.tempfile, "rb")
            # seek right after the header
            self.bundlefile.seek(6)
        elif header == "HG10UN":
            # nothing to do
            pass
        else:
            raise util.Abort(_("%s: unknown bundle compression type")
                             % bundlename)
        # dict with the mapping 'filename' -> position in the bundle
        self.bundlefilespos = {}

    def __getattr__(self, name):
        # lazily build changelog/manifest from the bundle; manstart and
        # filestart record where each section begins in the bundle file
        if name == 'changelog':
            self.changelog = bundlechangelog(self.sopener, self.bundlefile)
            self.manstart = self.bundlefile.tell()
            return self.changelog
        if name == 'manifest':
            self.bundlefile.seek(self.manstart)
            self.manifest = bundlemanifest(self.sopener, self.bundlefile,
                                           self.changelog.rev)
            self.filestart = self.bundlefile.tell()
            return self.manifest
        if name == 'manstart':
            # reading the changelog sets self.manstart as a side effect
            self.changelog
            return self.manstart
        if name == 'filestart':
            # reading the manifest sets self.filestart as a side effect
            self.manifest
            return self.filestart
        return localrepo.localrepository.__getattr__(self, name)

    def url(self):
        return self._url

    def file(self, f):
        """Return a filelog for f, overlaid with the bundle if f is in it."""
        if not self.bundlefilespos:
            # one-time scan of the file section: remember where each
            # per-file group starts, skipping over its chunks
            self.bundlefile.seek(self.filestart)
            while 1:
                chunk = changegroup.getchunk(self.bundlefile)
                if not chunk:
                    break
                self.bundlefilespos[chunk] = self.bundlefile.tell()
                for c in changegroup.chunkiter(self.bundlefile):
                    pass

        if f[0] == '/':
            f = f[1:]
        if f in self.bundlefilespos:
            self.bundlefile.seek(self.bundlefilespos[f])
            return bundlefilelog(self.sopener, f, self.bundlefile,
                                 self.changelog.rev)
        else:
            return filelog.filelog(self.sopener, f)

    def close(self):
        """Close assigned bundle file immediately."""
        self.bundlefile.close()

    def __del__(self):
        # best-effort cleanup: close the bundle, remove the decompressed
        # temp file and the temporary parent repo, if any
        bundlefile = getattr(self, 'bundlefile', None)
        if bundlefile and not bundlefile.closed:
            bundlefile.close()
        tempfile = getattr(self, 'tempfile', None)
        if tempfile is not None:
            os.unlink(tempfile)
        if self._tempparent:
            shutil.rmtree(self._tempparent, True)

    def cancopy(self):
        # a bundle repo has no complete on-disk store, so it can never
        # be cloned by copying files
        return False
266
def instance(ui, path, create):
    """Open a bundlerepository for a 'bundle:' URL or bundle file path.

    Creating a bundle repository is not supported.  The parent
    repository defaults to the configured bundle.mainreporoot.
    """
    if create:
        raise util.Abort(_('cannot create new bundle repository'))
    parentpath = ui.config("bundle", "mainreporoot", "")
    if parentpath:
        # Try to make the full path relative so we get a nice, short URL.
        # In particular, we don't want temp dir names in test outputs.
        cwd = os.getcwd()
        if parentpath == cwd:
            parentpath = ''
        else:
            cwd = os.path.join(cwd,'')
            if parentpath.startswith(cwd):
                parentpath = parentpath[len(cwd):]
    path = util.drop_scheme('file', path)
    if not path.startswith('bundle:'):
        # bare bundle file name
        repopath, bundlename = parentpath, path
    else:
        # bundle:[repopath+]bundlename
        path = util.drop_scheme('bundle', path)
        parts = path.split("+", 1)
        if len(parts) == 2:
            repopath, bundlename = parts
        else:
            repopath, bundlename = parentpath, parts[0]
    return bundlerepository(ui, repopath, bundlename)
@@ -1,311 +1,311
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
10 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
11 import errno, lock, os, shutil, util, extensions
11 import errno, lock, os, shutil, util, extensions
12 import merge as _merge
12 import merge as _merge
13 import verify as _verify
13 import verify as _verify
14
14
def _local(path):
    """Choose the repo module for a local path: a plain file is treated
    as a bundle, anything else as a local repository."""
    stripped = util.drop_scheme('file', path)
    if os.path.isfile(stripped):
        return bundlerepo
    return localrepo
18
18
def parseurl(url, revs):
    '''parse url#branch, returning url, branch + revs'''

    if '#' in url:
        base, fragment = url.split('#', 1)
        # the fragment names a branch/revision: append it to revs and
        # also return it separately as the requested checkout
        return base, revs + [fragment], fragment
    return url, (revs or None), None
27
27
# map of URL scheme -> repository module, or a chooser callable such as
# _local (resolved in _lookup)
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httprepo,
    'https': httprepo,
    'ssh': sshrepo,
    'static-http': statichttprepo,
    }
36
36
def _lookup(path):
    """Resolve a path/URL to its repository module via the scheme map."""
    scheme = 'file'
    if path:
        sep = path.find(':')
        if sep > 0:
            scheme = path[:sep]
    handler = schemes.get(scheme) or schemes['file']
    # map entries are either modules or chooser callables (e.g. _local);
    # calling a module raises TypeError, in which case use it directly
    try:
        return handler(path)
    except TypeError:
        return handler
48
48
def islocal(repo):
    '''return true if repo or path is local'''
    if not isinstance(repo, str):
        return repo.local()
    # a string path: ask the resolved repo module, if it knows
    try:
        return _lookup(repo).islocal(repo)
    except AttributeError:
        return False
57
57
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    repo = _lookup(path).instance(ui, path, create)
    ui = getattr(repo, "ui", ui)
    # give each loaded extension a chance to wrap the new repository
    for name, module in extensions.extensions():
        reposetup = getattr(module, 'reposetup', None)
        if reposetup:
            reposetup(ui, repo)
    return repo
67
67
def defaultdest(source):
    '''return default destination of clone if none is given'''
    # strip any trailing separator before taking the last component
    normalized = os.path.normpath(source)
    return os.path.basename(normalized)
71
71
def clone(ui, source, dest=None, pull=False, rev=None, update=True,
          stream=False):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository objects, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, rev, checkout = parseurl(origsource, rev)
        src_repo = repository(ui, source)
    else:
        src_repo = source
        origsource = source = src_repo.url()
        checkout = None

    if dest is None:
        dest = defaultdest(source)
        ui.status(_("destination directory: %s\n") % dest)

    def localpath(path):
        # strip the various file:// prefixes down to a plain path
        if path.startswith('file://localhost/'):
            return path[16:]
        if path.startswith('file://'):
            return path[7:]
        if path.startswith('file:'):
            return path[5:]
        return path

    dest = localpath(dest)
    source = localpath(source)

    if os.path.exists(dest):
        raise util.Abort(_("destination '%s' already exists") % dest)

    class DirCleanup(object):
        # removes the half-made destination on failure; close() disarms
        # it once the clone has succeeded
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
        def close(self):
            self.dir_ = None
        def __del__(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    src_lock = dest_lock = dir_cleanup = None
    try:
        if islocal(dest):
            dir_cleanup = DirCleanup(dest)

        abspath = origsource
        copy = False
        # hardlink/copy clone only when the source says its store may be
        # copied (e.g. a bundle repo cannot) and the destination is local
        if src_repo.cancopy() and islocal(dest):
            abspath = os.path.abspath(util.drop_scheme('file', origsource))
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                src_lock = src_repo.lock()
            except lock.LockException:
                copy = False

        if copy:
            def force_copy(src, dst):
                if not os.path.exists(src):
                    # Tolerate empty source repository and optional files
                    return
                util.copyfiles(src, dst)

            src_store = os.path.realpath(src_repo.spath)
            if not os.path.exists(dest):
                os.mkdir(dest)
            try:
                dest_path = os.path.realpath(os.path.join(dest, ".hg"))
                os.mkdir(dest_path)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise
            if src_repo.spath != src_repo.path:
                # XXX racy
                dummy_changelog = os.path.join(dest_path, "00changelog.i")
                # copy the dummy changelog
                force_copy(src_repo.join("00changelog.i"), dummy_changelog)
                dest_store = os.path.join(dest_path, "store")
                os.mkdir(dest_store)
            else:
                dest_store = dest_path
            # copy the requires file
            force_copy(src_repo.join("requires"),
                       os.path.join(dest_path, "requires"))
            # we lock here to avoid premature writing to the target
            dest_lock = lock.lock(os.path.join(dest_store, "lock"))

            files = ("data",
                     "00manifest.d", "00manifest.i",
                     "00changelog.d", "00changelog.i")
            for f in files:
                src = os.path.join(src_store, f)
                dst = os.path.join(dest_store, f)
                force_copy(src, dst)

            # we need to re-init the repo after manually copying the data
            # into it
            dest_repo = repository(ui, dest)

        else:
            try:
                dest_repo = repository(ui, dest, create=True)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            revs = None
            if rev:
                if 'lookup' not in src_repo.capabilities:
                    raise util.Abort(_("src repository does not support revision "
                                       "lookup and so doesn't support clone by "
                                       "revision"))
                revs = [src_repo.lookup(r) for r in rev]

            if dest_repo.local():
                dest_repo.clone(src_repo, heads=revs, stream=stream)
            elif src_repo.local():
                src_repo.push(dest_repo, revs=revs)
            else:
                raise util.Abort(_("clone from remote to remote not supported"))

        # clone succeeded: keep the destination directory
        if dir_cleanup:
            dir_cleanup.close()

        if dest_repo.local():
            # record the source as the default path for future pulls
            fp = dest_repo.opener("hgrc", "w", text=True)
            fp.write("[paths]\n")
            fp.write("default = %s\n" % abspath)
            fp.close()

            if update:
                if not checkout:
                    try:
                        checkout = dest_repo.lookup("default")
                    except:
                        checkout = dest_repo.changelog.tip()
                _update(dest_repo, checkout)

        return src_repo, dest_repo
    finally:
        # release locks and disarm/trigger cleanup deterministically
        del src_lock, dest_lock, dir_cleanup
256
256
def _showstats(repo, stats):
    """Print a one-line summary of merge/update statistics on the ui.

    stats is the 4-tuple returned by _merge.update():
    (updated, merged, removed, unresolved) file counts.
    """
    labels = (_("updated"), _("merged"), _("removed"), _("unresolved"))
    parts = [_("%d files %s") % (count, label)
             for count, label in zip(stats, labels)]
    repo.ui.status("%s\n" % ", ".join(parts))
264
264
def _update(repo, node):
    # Internal alias for update(); lets code in this module call the
    # working-directory update even where the name "update" is rebound.
    return update(repo, node)
266
266
def update(repo, node):
    """update the working directory to node, merging linear changes

    Prints a stats summary and, when unresolved files remain, hints on
    how to finish or redo the merge.  Returns True if there were
    unresolved files.
    """
    # capture the working dir parent before the update rewrites it
    parents = repo.parents()
    stats = _merge.update(repo, node, False, False, None)
    _showstats(repo, stats)
    unresolved = stats[3]
    if unresolved:
        repo.ui.status(_("There are unresolved merges with"
                         " locally modified files.\n"))
        if stats[1]:
            # some files were merged: the user may finish what started
            repo.ui.status(_("You can finish the partial merge using:\n"))
        else:
            repo.ui.status(_("You can redo the full merge using:\n"))
        # len(parents)==1, otherwise _merge.update() would have raised util.Abort:
        repo.ui.status(_(" hg update %s\n hg update %s\n")
                       % (parents[0].rev(), repo.changectx(node).rev()))
    return unresolved > 0
283
283
def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes

    Returns True if unresolved files remain (stats summary is printed
    unless show_stats is False).
    """
    stats = _merge.update(repo, node, False, True, None)
    if show_stats:
        _showstats(repo, stats)
    return stats[3] > 0
289
289
def merge(repo, node, force=None, remind=True):
    """branch merge with node, resolving changes

    Prints a stats summary; on unresolved files, shows how to redo the
    merge, otherwise (when remind is true) reminds the user to commit.
    Returns True if there were unresolved files.
    """
    stats = _merge.update(repo, node, True, force, False)
    _showstats(repo, stats)
    unresolved = stats[3]
    if not unresolved:
        if remind:
            repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    else:
        parents = repo.parents()
        repo.ui.status(_("There are unresolved merges,"
                         " you can redo the full merge using:\n"
                         " hg update -C %s\n"
                         " hg merge %s\n")
                       % (parents[0].rev(), parents[1].rev()))
    return unresolved > 0
304
304
def revert(repo, node, choose):
    """revert changes to revision in node without updating dirstate

    choose is a file-selection callback passed through to
    _merge.update(); returns True if unresolved files remain.
    """
    stats = _merge.update(repo, node, False, True, choose)
    return stats[3] > 0
308
308
def verify(repo):
    """verify the consistency of a repository"""
    # thin delegate to the verify module's implementation
    return _verify.verify(repo)
General Comments 0
You need to be logged in to leave comments. Login now