clone: use cancopy
Matt Mackall
r6315:5c96a4bc default
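The change is small: bundlerepository grows a cancopy() override that returns False, and hg.clone() now asks the source repository whether its store may be copied (src_repo.cancopy()) instead of merely whether it is local (src_repo.local()). A bundle-backed repository is "local" in the sense that it lives on disk, but its revlogs are partly synthesized from the bundle file, so cloning it by hardlinking or copying store files would produce a broken clone; it has to be cloned by pull. The sketch below is illustrative only -- the class names are hypothetical, and it assumes the base local repository class exposes a cancopy() that is true for ordinary on-disk repositories:

# Illustrative sketch, not Mercurial source: the capability check clone() relies on.

class _plainrepo(object):
    """Stand-in for an ordinary local repository."""
    def local(self):
        return True
    def cancopy(self):
        # assumed default: an on-disk store may be hardlinked/copied
        return self.local()

class _bundlebacked(_plainrepo):
    """Stand-in for bundlerepository: local, but not safely copyable."""
    def cancopy(self):
        # the store is reconstructed from a bundle file, so copying it
        # would not yield a usable clone -- force the pull path instead
        return False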
@@ -1,288 +1,291 @@
1 1 """
2 2 bundlerepo.py - repository class for viewing uncompressed bundles
3 3
4 4 This provides a read-only repository interface to bundles as if
5 5 they were part of the actual repository.
6 6
7 7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import hex, nullid, short
14 14 from i18n import _
15 15 import changegroup, util, os, struct, bz2, tempfile, shutil, mdiff
16 16 import repo, localrepo, changelog, manifest, filelog, revlog
17 17
18 18 class bundlerevlog(revlog.revlog):
19 19 def __init__(self, opener, indexfile, bundlefile,
20 20 linkmapper=None):
21 21 # How it works:
22 22 # to retrieve a revision, we need to know the offset of
23 23 # the revision in the bundlefile (an opened file).
24 24 #
25 25 # We store this offset in the index (start). To differentiate a
26 26 # rev in the bundle from a rev in the revlog, we check
27 27 # len(index[r]). If the tuple is bigger than 7, it is a bundle
28 28 # (it is bigger since we also store the node the delta is against)
29 29 #
30 30 revlog.revlog.__init__(self, opener, indexfile)
31 31 self.bundlefile = bundlefile
32 32 self.basemap = {}
33 33 def chunkpositer():
34 34 for chunk in changegroup.chunkiter(bundlefile):
35 35 pos = bundlefile.tell()
36 36 yield chunk, pos - len(chunk)
37 37 n = self.count()
38 38 prev = None
39 39 for chunk, start in chunkpositer():
40 40 size = len(chunk)
41 41 if size < 80:
42 42 raise util.Abort("invalid changegroup")
43 43 start += 80
44 44 size -= 80
45 45 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
46 46 if node in self.nodemap:
47 47 prev = node
48 48 continue
49 49 for p in (p1, p2):
50 50 if p not in self.nodemap:
51 51 raise revlog.LookupError(p, self.indexfile,
52 52 _("unknown parent"))
53 53 if linkmapper is None:
54 54 link = n
55 55 else:
56 56 link = linkmapper(cs)
57 57
58 58 if not prev:
59 59 prev = p1
60 60 # start, size, full unc. size, base (unused), link, p1, p2, node
61 61 e = (revlog.offset_type(start, 0), size, -1, -1, link,
62 62 self.rev(p1), self.rev(p2), node)
63 63 self.basemap[n] = prev
64 64 self.index.insert(-1, e)
65 65 self.nodemap[node] = n
66 66 prev = node
67 67 n += 1
68 68
69 69 def bundle(self, rev):
70 70 """is rev from the bundle"""
71 71 if rev < 0:
72 72 return False
73 73 return rev in self.basemap
74 74 def bundlebase(self, rev): return self.basemap[rev]
75 75 def chunk(self, rev, df=None, cachelen=4096):
76 76 # Warning: for a rev that comes from the bundle, the diff is against
77 77 # bundlebase(rev), not against rev - 1
78 78 # XXX: could use some caching
79 79 if not self.bundle(rev):
80 80 return revlog.revlog.chunk(self, rev, df)
81 81 self.bundlefile.seek(self.start(rev))
82 82 return self.bundlefile.read(self.length(rev))
83 83
84 84 def revdiff(self, rev1, rev2):
85 85 """return or calculate a delta between two revisions"""
86 86 if self.bundle(rev1) and self.bundle(rev2):
87 87 # hot path for bundle
88 88 revb = self.rev(self.bundlebase(rev2))
89 89 if revb == rev1:
90 90 return self.chunk(rev2)
91 91 elif not self.bundle(rev1) and not self.bundle(rev2):
92 92 return revlog.revlog.revdiff(self, rev1, rev2)
93 93
94 94 return mdiff.textdiff(self.revision(self.node(rev1)),
95 95 self.revision(self.node(rev2)))
96 96
97 97 def revision(self, node):
98 98 """return an uncompressed revision of a given"""
99 99 if node == nullid: return ""
100 100
101 101 text = None
102 102 chain = []
103 103 iter_node = node
104 104 rev = self.rev(iter_node)
105 105 # reconstruct the revision if it is from a changegroup
106 106 while self.bundle(rev):
107 107 if self._cache and self._cache[0] == iter_node:
108 108 text = self._cache[2]
109 109 break
110 110 chain.append(rev)
111 111 iter_node = self.bundlebase(rev)
112 112 rev = self.rev(iter_node)
113 113 if text is None:
114 114 text = revlog.revlog.revision(self, iter_node)
115 115
116 116 while chain:
117 117 delta = self.chunk(chain.pop())
118 118 text = mdiff.patches(text, [delta])
119 119
120 120 p1, p2 = self.parents(node)
121 121 if node != revlog.hash(text, p1, p2):
122 122 raise revlog.RevlogError(_("integrity check failed on %s:%d")
123 123 % (self.datafile, self.rev(node)))
124 124
125 125 self._cache = (node, self.rev(node), text)
126 126 return text
127 127
128 128 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
129 129 raise NotImplementedError
130 130 def addgroup(self, revs, linkmapper, transaction, unique=0):
131 131 raise NotImplementedError
132 132 def strip(self, rev, minlink):
133 133 raise NotImplementedError
134 134 def checksize(self):
135 135 raise NotImplementedError
136 136
137 137 class bundlechangelog(bundlerevlog, changelog.changelog):
138 138 def __init__(self, opener, bundlefile):
139 139 changelog.changelog.__init__(self, opener)
140 140 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
141 141
142 142 class bundlemanifest(bundlerevlog, manifest.manifest):
143 143 def __init__(self, opener, bundlefile, linkmapper):
144 144 manifest.manifest.__init__(self, opener)
145 145 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
146 146 linkmapper)
147 147
148 148 class bundlefilelog(bundlerevlog, filelog.filelog):
149 149 def __init__(self, opener, path, bundlefile, linkmapper):
150 150 filelog.filelog.__init__(self, opener, path)
151 151 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
152 152 linkmapper)
153 153
154 154 class bundlerepository(localrepo.localrepository):
155 155 def __init__(self, ui, path, bundlename):
156 156 self._tempparent = None
157 157 try:
158 158 localrepo.localrepository.__init__(self, ui, path)
159 159 except repo.RepoError:
160 160 self._tempparent = tempfile.mkdtemp()
161 161 tmprepo = localrepo.instance(ui, self._tempparent, 1)
162 162 localrepo.localrepository.__init__(self, ui, self._tempparent)
163 163
164 164 if path:
165 165 self._url = 'bundle:' + path + '+' + bundlename
166 166 else:
167 167 self._url = 'bundle:' + bundlename
168 168
169 169 self.tempfile = None
170 170 self.bundlefile = open(bundlename, "rb")
171 171 header = self.bundlefile.read(6)
172 172 if not header.startswith("HG"):
173 173 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
174 174 elif not header.startswith("HG10"):
175 175 raise util.Abort(_("%s: unknown bundle version") % bundlename)
176 176 elif header == "HG10BZ":
177 177 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
178 178 suffix=".hg10un", dir=self.path)
179 179 self.tempfile = temp
180 180 fptemp = os.fdopen(fdtemp, 'wb')
181 181 def generator(f):
182 182 zd = bz2.BZ2Decompressor()
183 183 zd.decompress("BZ")
184 184 for chunk in f:
185 185 yield zd.decompress(chunk)
186 186 gen = generator(util.filechunkiter(self.bundlefile, 4096))
187 187
188 188 try:
189 189 fptemp.write("HG10UN")
190 190 for chunk in gen:
191 191 fptemp.write(chunk)
192 192 finally:
193 193 fptemp.close()
194 194 self.bundlefile.close()
195 195
196 196 self.bundlefile = open(self.tempfile, "rb")
197 197 # seek right after the header
198 198 self.bundlefile.seek(6)
199 199 elif header == "HG10UN":
200 200 # nothing to do
201 201 pass
202 202 else:
203 203 raise util.Abort(_("%s: unknown bundle compression type")
204 204 % bundlename)
205 205 # dict with the mapping 'filename' -> position in the bundle
206 206 self.bundlefilespos = {}
207 207
208 208 def __getattr__(self, name):
209 209 if name == 'changelog':
210 210 self.changelog = bundlechangelog(self.sopener, self.bundlefile)
211 211 self.manstart = self.bundlefile.tell()
212 212 return self.changelog
213 213 if name == 'manifest':
214 214 self.bundlefile.seek(self.manstart)
215 215 self.manifest = bundlemanifest(self.sopener, self.bundlefile,
216 216 self.changelog.rev)
217 217 self.filestart = self.bundlefile.tell()
218 218 return self.manifest
219 219 if name == 'manstart':
220 220 self.changelog
221 221 return self.manstart
222 222 if name == 'filestart':
223 223 self.manifest
224 224 return self.filestart
225 225 return localrepo.localrepository.__getattr__(self, name)
226 226
227 227 def url(self):
228 228 return self._url
229 229
230 230 def file(self, f):
231 231 if not self.bundlefilespos:
232 232 self.bundlefile.seek(self.filestart)
233 233 while 1:
234 234 chunk = changegroup.getchunk(self.bundlefile)
235 235 if not chunk:
236 236 break
237 237 self.bundlefilespos[chunk] = self.bundlefile.tell()
238 238 for c in changegroup.chunkiter(self.bundlefile):
239 239 pass
240 240
241 241 if f[0] == '/':
242 242 f = f[1:]
243 243 if f in self.bundlefilespos:
244 244 self.bundlefile.seek(self.bundlefilespos[f])
245 245 return bundlefilelog(self.sopener, f, self.bundlefile,
246 246 self.changelog.rev)
247 247 else:
248 248 return filelog.filelog(self.sopener, f)
249 249
250 250 def close(self):
251 251 """Close assigned bundle file immediately."""
252 252 self.bundlefile.close()
253 253
254 254 def __del__(self):
255 255 bundlefile = getattr(self, 'bundlefile', None)
256 256 if bundlefile and not bundlefile.closed:
257 257 bundlefile.close()
258 258 tempfile = getattr(self, 'tempfile', None)
259 259 if tempfile is not None:
260 260 os.unlink(tempfile)
261 261 if self._tempparent:
262 262 shutil.rmtree(self._tempparent, True)
263 263
264 def cancopy(self):
265 return False
266
264 267 def instance(ui, path, create):
265 268 if create:
266 269 raise util.Abort(_('cannot create new bundle repository'))
267 270 parentpath = ui.config("bundle", "mainreporoot", "")
268 271 if parentpath:
269 272 # Try to make the full path relative so we get a nice, short URL.
270 273 # In particular, we don't want temp dir names in test outputs.
271 274 cwd = os.getcwd()
272 275 if parentpath == cwd:
273 276 parentpath = ''
274 277 else:
275 278 cwd = os.path.join(cwd, '')
276 279 if parentpath.startswith(cwd):
277 280 parentpath = parentpath[len(cwd):]
278 281 path = util.drop_scheme('file', path)
279 282 if path.startswith('bundle:'):
280 283 path = util.drop_scheme('bundle', path)
281 284 s = path.split("+", 1)
282 285 if len(s) == 1:
283 286 repopath, bundlename = parentpath, s[0]
284 287 else:
285 288 repopath, bundlename = s
286 289 else:
287 290 repopath, bundlename = parentpath, path
288 291 return bundlerepository(ui, repopath, bundlename)
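For readers skimming bundlerepo.py above: revisions that come from the bundle are appended to the in-memory revlog index with their byte offset inside the (uncompressed) bundle file, and basemap remembers which node each chunk's delta was made against. chunk() then reads the raw delta straight out of the bundle, and revision() walks basemap back until it reaches a revision stored in the real revlog, then reapplies the bundle deltas in order. A rough, self-contained sketch of that reconstruction loop follows; all names are hypothetical stand-ins, not the bundlerevlog API:

# Hypothetical sketch of the walk-back-then-patch-forward idea in
# bundlerevlog.revision(); the callables are stand-ins, not Mercurial APIs.

def reconstruct(node, in_bundle, base_of, base_text, bundle_delta, patch):
    chain = []
    n = node
    while in_bundle(n):            # delta for n exists only in the bundle
        chain.append(n)
        n = base_of(n)             # node the delta was computed against
    text = base_text(n)            # full text from the on-disk revlog
    while chain:                   # reapply the bundle deltas, newest last
        text = patch(text, bundle_delta(chain.pop()))
    return text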
@@ -1,311 +1,311 @@
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
11 11 import errno, lock, os, shutil, util, extensions
12 12 import merge as _merge
13 13 import verify as _verify
14 14
15 15 def _local(path):
16 16 return (os.path.isfile(util.drop_scheme('file', path)) and
17 17 bundlerepo or localrepo)
18 18
19 19 def parseurl(url, revs):
20 20 '''parse url#branch, returning url, branch + revs'''
21 21
22 22 if '#' not in url:
23 23 return url, (revs or None), None
24 24
25 25 url, rev = url.split('#', 1)
26 26 return url, revs + [rev], rev
27 27
28 28 schemes = {
29 29 'bundle': bundlerepo,
30 30 'file': _local,
31 31 'http': httprepo,
32 32 'https': httprepo,
33 33 'ssh': sshrepo,
34 34 'static-http': statichttprepo,
35 35 }
36 36
37 37 def _lookup(path):
38 38 scheme = 'file'
39 39 if path:
40 40 c = path.find(':')
41 41 if c > 0:
42 42 scheme = path[:c]
43 43 thing = schemes.get(scheme) or schemes['file']
44 44 try:
45 45 return thing(path)
46 46 except TypeError:
47 47 return thing
48 48
49 49 def islocal(repo):
50 50 '''return true if repo or path is local'''
51 51 if isinstance(repo, str):
52 52 try:
53 53 return _lookup(repo).islocal(repo)
54 54 except AttributeError:
55 55 return False
56 56 return repo.local()
57 57
58 58 def repository(ui, path='', create=False):
59 59 """return a repository object for the specified path"""
60 60 repo = _lookup(path).instance(ui, path, create)
61 61 ui = getattr(repo, "ui", ui)
62 62 for name, module in extensions.extensions():
63 63 hook = getattr(module, 'reposetup', None)
64 64 if hook:
65 65 hook(ui, repo)
66 66 return repo
67 67
68 68 def defaultdest(source):
69 69 '''return default destination of clone if none is given'''
70 70 return os.path.basename(os.path.normpath(source))
71 71
72 72 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
73 73 stream=False):
74 74 """Make a copy of an existing repository.
75 75
76 76 Create a copy of an existing repository in a new directory. The
77 77 source and destination are URLs, as passed to the repository
78 78 function. Returns a pair of repository objects, the source and
79 79 newly created destination.
80 80
81 81 The location of the source is added to the new repository's
82 82 .hg/hgrc file, as the default to be used for future pulls and
83 83 pushes.
84 84
85 85 If an exception is raised, the partly cloned/updated destination
86 86 repository will be deleted.
87 87
88 88 Arguments:
89 89
90 90 source: repository object or URL
91 91
92 92 dest: URL of destination repository to create (defaults to base
93 93 name of source repository)
94 94
95 95 pull: always pull from source repository, even in local case
96 96
97 97 stream: stream raw data uncompressed from repository (fast over
98 98 LAN, slow over WAN)
99 99
100 100 rev: revision to clone up to (implies pull=True)
101 101
102 102 update: update working directory after clone completes, if
103 103 destination is local repository
104 104 """
105 105
106 106 if isinstance(source, str):
107 107 origsource = ui.expandpath(source)
108 108 source, rev, checkout = parseurl(origsource, rev)
109 109 src_repo = repository(ui, source)
110 110 else:
111 111 src_repo = source
112 112 origsource = source = src_repo.url()
113 113 checkout = None
114 114
115 115 if dest is None:
116 116 dest = defaultdest(source)
117 117 ui.status(_("destination directory: %s\n") % dest)
118 118
119 119 def localpath(path):
120 120 if path.startswith('file://localhost/'):
121 121 return path[16:]
122 122 if path.startswith('file://'):
123 123 return path[7:]
124 124 if path.startswith('file:'):
125 125 return path[5:]
126 126 return path
127 127
128 128 dest = localpath(dest)
129 129 source = localpath(source)
130 130
131 131 if os.path.exists(dest):
132 132 raise util.Abort(_("destination '%s' already exists") % dest)
133 133
134 134 class DirCleanup(object):
135 135 def __init__(self, dir_):
136 136 self.rmtree = shutil.rmtree
137 137 self.dir_ = dir_
138 138 def close(self):
139 139 self.dir_ = None
140 140 def __del__(self):
141 141 if self.dir_:
142 142 self.rmtree(self.dir_, True)
143 143
144 144 src_lock = dest_lock = dir_cleanup = None
145 145 try:
146 146 if islocal(dest):
147 147 dir_cleanup = DirCleanup(dest)
148 148
149 149 abspath = origsource
150 150 copy = False
151 if src_repo.local() and islocal(dest):
151 if src_repo.cancopy() and islocal(dest):
152 152 abspath = os.path.abspath(util.drop_scheme('file', origsource))
153 153 copy = not pull and not rev
154 154
155 155 if copy:
156 156 try:
157 157 # we use a lock here because if we race with commit, we
158 158 # can end up with extra data in the cloned revlogs that's
159 159 # not pointed to by changesets, thus causing verify to
160 160 # fail
161 161 src_lock = src_repo.lock()
162 162 except lock.LockException:
163 163 copy = False
164 164
165 165 if copy:
166 166 def force_copy(src, dst):
167 167 if not os.path.exists(src):
168 168 # Tolerate empty source repository and optional files
169 169 return
170 170 util.copyfiles(src, dst)
171 171
172 172 src_store = os.path.realpath(src_repo.spath)
173 173 if not os.path.exists(dest):
174 174 os.mkdir(dest)
175 175 try:
176 176 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
177 177 os.mkdir(dest_path)
178 178 except OSError, inst:
179 179 if inst.errno == errno.EEXIST:
180 180 dir_cleanup.close()
181 181 raise util.Abort(_("destination '%s' already exists")
182 182 % dest)
183 183 raise
184 184 if src_repo.spath != src_repo.path:
185 185 # XXX racy
186 186 dummy_changelog = os.path.join(dest_path, "00changelog.i")
187 187 # copy the dummy changelog
188 188 force_copy(src_repo.join("00changelog.i"), dummy_changelog)
189 189 dest_store = os.path.join(dest_path, "store")
190 190 os.mkdir(dest_store)
191 191 else:
192 192 dest_store = dest_path
193 193 # copy the requires file
194 194 force_copy(src_repo.join("requires"),
195 195 os.path.join(dest_path, "requires"))
196 196 # we lock here to avoid premature writing to the target
197 197 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
198 198
199 199 files = ("data",
200 200 "00manifest.d", "00manifest.i",
201 201 "00changelog.d", "00changelog.i")
202 202 for f in files:
203 203 src = os.path.join(src_store, f)
204 204 dst = os.path.join(dest_store, f)
205 205 force_copy(src, dst)
206 206
207 207 # we need to re-init the repo after manually copying the data
208 208 # into it
209 209 dest_repo = repository(ui, dest)
210 210
211 211 else:
212 212 try:
213 213 dest_repo = repository(ui, dest, create=True)
214 214 except OSError, inst:
215 215 if inst.errno == errno.EEXIST:
216 216 dir_cleanup.close()
217 217 raise util.Abort(_("destination '%s' already exists")
218 218 % dest)
219 219 raise
220 220
221 221 revs = None
222 222 if rev:
223 223 if 'lookup' not in src_repo.capabilities:
224 224 raise util.Abort(_("src repository does not support revision "
225 225 "lookup and so doesn't support clone by "
226 226 "revision"))
227 227 revs = [src_repo.lookup(r) for r in rev]
228 228
229 229 if dest_repo.local():
230 230 dest_repo.clone(src_repo, heads=revs, stream=stream)
231 231 elif src_repo.local():
232 232 src_repo.push(dest_repo, revs=revs)
233 233 else:
234 234 raise util.Abort(_("clone from remote to remote not supported"))
235 235
236 236 if dir_cleanup:
237 237 dir_cleanup.close()
238 238
239 239 if dest_repo.local():
240 240 fp = dest_repo.opener("hgrc", "w", text=True)
241 241 fp.write("[paths]\n")
242 242 fp.write("default = %s\n" % abspath)
243 243 fp.close()
244 244
245 245 if update:
246 246 if not checkout:
247 247 try:
248 248 checkout = dest_repo.lookup("default")
249 249 except:
250 250 checkout = dest_repo.changelog.tip()
251 251 _update(dest_repo, checkout)
252 252
253 253 return src_repo, dest_repo
254 254 finally:
255 255 del src_lock, dest_lock, dir_cleanup
256 256
257 257 def _showstats(repo, stats):
258 258 stats = ((stats[0], _("updated")),
259 259 (stats[1], _("merged")),
260 260 (stats[2], _("removed")),
261 261 (stats[3], _("unresolved")))
262 262 note = ", ".join([_("%d files %s") % s for s in stats])
263 263 repo.ui.status("%s\n" % note)
264 264
265 265 def _update(repo, node): return update(repo, node)
266 266
267 267 def update(repo, node):
268 268 """update the working directory to node, merging linear changes"""
269 269 pl = repo.parents()
270 270 stats = _merge.update(repo, node, False, False, None)
271 271 _showstats(repo, stats)
272 272 if stats[3]:
273 273 repo.ui.status(_("There are unresolved merges with"
274 274 " locally modified files.\n"))
275 275 if stats[1]:
276 276 repo.ui.status(_("You can finish the partial merge using:\n"))
277 277 else:
278 278 repo.ui.status(_("You can redo the full merge using:\n"))
279 279 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
280 280 repo.ui.status(_(" hg update %s\n hg update %s\n")
281 281 % (pl[0].rev(), repo.changectx(node).rev()))
282 282 return stats[3] > 0
283 283
284 284 def clean(repo, node, show_stats=True):
285 285 """forcibly switch the working directory to node, clobbering changes"""
286 286 stats = _merge.update(repo, node, False, True, None)
287 287 if show_stats: _showstats(repo, stats)
288 288 return stats[3] > 0
289 289
290 290 def merge(repo, node, force=None, remind=True):
291 291 """branch merge with node, resolving changes"""
292 292 stats = _merge.update(repo, node, True, force, False)
293 293 _showstats(repo, stats)
294 294 if stats[3]:
295 295 pl = repo.parents()
296 296 repo.ui.status(_("There are unresolved merges,"
297 297 " you can redo the full merge using:\n"
298 298 " hg update -C %s\n"
299 299 " hg merge %s\n")
300 300 % (pl[0].rev(), pl[1].rev()))
301 301 elif remind:
302 302 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
303 303 return stats[3] > 0
304 304
305 305 def revert(repo, node, choose):
306 306 """revert changes to revision in node without updating dirstate"""
307 307 return _merge.update(repo, node, False, True, choose)[3] > 0
308 308
309 309 def verify(repo):
310 310 """verify the consistency of a repository"""
311 311 return _verify.verify(repo)
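The effect of the one-line change in clone() (old/new line 151) is easiest to see as a decision: the hardlink/copy fast path is taken only when the source says its store is copyable and the destination is local, and even then only when neither pull nor a revision limit was requested; otherwise the clone falls back to pull (local destination) or push (remote destination). In the real function the copy path can additionally fall back to pull if the source cannot be locked. A condensed, hypothetical rendering of that choice, not a drop-in replacement for the function above:

# Hypothetical distillation of the transfer choice in hg.clone().

def choose_transfer(src_cancopy, src_is_local, dest_is_local, pull=False, rev=None):
    # the changeset above: check cancopy(), not merely local(), before copying
    if src_cancopy and dest_is_local and not pull and not rev:
        return "copy store files (hardlink-friendly fast path)"
    if dest_is_local:
        return "create the destination and pull from the source"
    if src_is_local:
        return "push from the local source to the remote destination"
    raise ValueError("clone from remote to remote not supported")

For a bundlerepository source, src_cancopy is now always False, so a local clone of a bundle goes through the pull path instead of copying the synthesized store.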