parseurl: make revs optional
Bryan O'Sullivan
r6525:a020247d default
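The change gives revs a default value, so callers that only want to peel a #branch fragment off a URL no longer have to pass an empty list explicitly. Below is a minimal sketch of the resulting behaviour, using a standalone copy of the function taken from the diff; the example URLs are made up for illustration.

# Standalone sketch of parseurl() after this change -- copied from the
# diff below for illustration only; in Mercurial it lives in hg.py.
def parseurl(url, revs=[]):
    '''parse url#branch, returning url, branch + revs'''
    if '#' not in url:
        return url, (revs or None), None
    url, rev = url.split('#', 1)
    return url, revs + [rev], rev

# The revs argument can now be omitted (hypothetical example URLs):
print parseurl('http://example.com/repo')
# -> ('http://example.com/repo', None, None)
print parseurl('http://example.com/repo#stable')
# -> ('http://example.com/repo', ['stable'], 'stable')
print parseurl('http://example.com/repo#stable', ['tip'])
# -> ('http://example.com/repo', ['tip', 'stable'], 'stable')

Note that the mutable [] default is safe here: the function never mutates revs, it builds a new list with revs + [rev].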
@@ -1,300 +1,300 @@
 # hg.py - repository classes for mercurial
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.

 from i18n import _
 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
 import errno, lock, os, shutil, util, extensions
 import merge as _merge
 import verify as _verify

 def _local(path):
     return (os.path.isfile(util.drop_scheme('file', path)) and
             bundlerepo or localrepo)

-def parseurl(url, revs):
+def parseurl(url, revs=[]):
     '''parse url#branch, returning url, branch + revs'''

     if '#' not in url:
         return url, (revs or None), None

     url, rev = url.split('#', 1)
     return url, revs + [rev], rev

 schemes = {
     'bundle': bundlerepo,
     'file': _local,
     'http': httprepo,
     'https': httprepo,
     'ssh': sshrepo,
     'static-http': statichttprepo,
     }

 def _lookup(path):
     scheme = 'file'
     if path:
         c = path.find(':')
         if c > 0:
             scheme = path[:c]
     thing = schemes.get(scheme) or schemes['file']
     try:
         return thing(path)
     except TypeError:
         return thing

 def islocal(repo):
     '''return true if repo or path is local'''
     if isinstance(repo, str):
         try:
             return _lookup(repo).islocal(repo)
         except AttributeError:
             return False
     return repo.local()

 def repository(ui, path='', create=False):
     """return a repository object for the specified path"""
     repo = _lookup(path).instance(ui, path, create)
     ui = getattr(repo, "ui", ui)
     for name, module in extensions.extensions():
         hook = getattr(module, 'reposetup', None)
         if hook:
             hook(ui, repo)
     return repo

 def defaultdest(source):
     '''return default destination of clone if none is given'''
     return os.path.basename(os.path.normpath(source))

 def localpath(path):
     if path.startswith('file://localhost/'):
         return path[16:]
     if path.startswith('file://'):
         return path[7:]
     if path.startswith('file:'):
         return path[5:]
     return path

 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
           stream=False):
     """Make a copy of an existing repository.

     Create a copy of an existing repository in a new directory. The
     source and destination are URLs, as passed to the repository
     function. Returns a pair of repository objects, the source and
     newly created destination.

     The location of the source is added to the new repository's
     .hg/hgrc file, as the default to be used for future pulls and
     pushes.

     If an exception is raised, the partly cloned/updated destination
     repository will be deleted.

     Arguments:

     source: repository object or URL

     dest: URL of destination repository to create (defaults to base
     name of source repository)

     pull: always pull from source repository, even in local case

     stream: stream raw data uncompressed from repository (fast over
     LAN, slow over WAN)

     rev: revision to clone up to (implies pull=True)

     update: update working directory after clone completes, if
     destination is local repository
     """

     if isinstance(source, str):
         origsource = ui.expandpath(source)
         source, rev, checkout = parseurl(origsource, rev)
         src_repo = repository(ui, source)
     else:
         src_repo = source
         origsource = source = src_repo.url()
         checkout = None

     if dest is None:
         dest = defaultdest(source)
         ui.status(_("destination directory: %s\n") % dest)

     dest = localpath(dest)
     source = localpath(source)

     if os.path.exists(dest):
         raise util.Abort(_("destination '%s' already exists") % dest)

     class DirCleanup(object):
         def __init__(self, dir_):
             self.rmtree = shutil.rmtree
             self.dir_ = dir_
         def close(self):
             self.dir_ = None
         def __del__(self):
             if self.dir_:
                 self.rmtree(self.dir_, True)

     src_lock = dest_lock = dir_cleanup = None
     try:
         if islocal(dest):
             dir_cleanup = DirCleanup(dest)

         abspath = origsource
         copy = False
         if src_repo.cancopy() and islocal(dest):
             abspath = os.path.abspath(util.drop_scheme('file', origsource))
             copy = not pull and not rev

         if copy:
             try:
                 # we use a lock here because if we race with commit, we
                 # can end up with extra data in the cloned revlogs that's
                 # not pointed to by changesets, thus causing verify to
                 # fail
                 src_lock = src_repo.lock()
             except lock.LockException:
                 copy = False

         if copy:
             def force_copy(src, dst):
                 if not os.path.exists(src):
                     # Tolerate empty source repository and optional files
                     return
                 util.copyfiles(src, dst)

             src_store = os.path.realpath(src_repo.spath)
             if not os.path.exists(dest):
                 os.mkdir(dest)
             try:
                 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
                 os.mkdir(dest_path)
             except OSError, inst:
                 if inst.errno == errno.EEXIST:
                     dir_cleanup.close()
                     raise util.Abort(_("destination '%s' already exists")
                                      % dest)
                 raise
             if src_repo.spath != src_repo.path:
                 # XXX racy
                 dummy_changelog = os.path.join(dest_path, "00changelog.i")
                 # copy the dummy changelog
                 force_copy(src_repo.join("00changelog.i"), dummy_changelog)
                 dest_store = os.path.join(dest_path, "store")
                 os.mkdir(dest_store)
             else:
                 dest_store = dest_path
             # copy the requires file
             force_copy(src_repo.join("requires"),
                        os.path.join(dest_path, "requires"))
             # we lock here to avoid premature writing to the target
             dest_lock = lock.lock(os.path.join(dest_store, "lock"))

             files = ("data",
                      "00manifest.d", "00manifest.i",
                      "00changelog.d", "00changelog.i")
             for f in files:
                 src = os.path.join(src_store, f)
                 dst = os.path.join(dest_store, f)
                 force_copy(src, dst)

             # we need to re-init the repo after manually copying the data
             # into it
             dest_repo = repository(ui, dest)

         else:
             try:
                 dest_repo = repository(ui, dest, create=True)
             except OSError, inst:
                 if inst.errno == errno.EEXIST:
                     dir_cleanup.close()
                     raise util.Abort(_("destination '%s' already exists")
                                      % dest)
                 raise

             revs = None
             if rev:
                 if 'lookup' not in src_repo.capabilities:
                     raise util.Abort(_("src repository does not support revision "
                                        "lookup and so doesn't support clone by "
                                        "revision"))
                 revs = [src_repo.lookup(r) for r in rev]

             if dest_repo.local():
                 dest_repo.clone(src_repo, heads=revs, stream=stream)
             elif src_repo.local():
                 src_repo.push(dest_repo, revs=revs)
             else:
                 raise util.Abort(_("clone from remote to remote not supported"))

         if dir_cleanup:
             dir_cleanup.close()

         if dest_repo.local():
             fp = dest_repo.opener("hgrc", "w", text=True)
             fp.write("[paths]\n")
             fp.write("default = %s\n" % abspath)
             fp.close()

             if update:
                 dest_repo.ui.status(_("updating working directory\n"))
                 if not checkout:
                     try:
                         checkout = dest_repo.lookup("default")
                     except:
                         checkout = dest_repo.changelog.tip()
                 _update(dest_repo, checkout)

         return src_repo, dest_repo
     finally:
         del src_lock, dest_lock, dir_cleanup

 def _showstats(repo, stats):
     stats = ((stats[0], _("updated")),
              (stats[1], _("merged")),
              (stats[2], _("removed")),
              (stats[3], _("unresolved")))
     note = ", ".join([_("%d files %s") % s for s in stats])
     repo.ui.status("%s\n" % note)

 def _update(repo, node): return update(repo, node)

 def update(repo, node):
     """update the working directory to node, merging linear changes"""
     pl = repo.parents()
     stats = _merge.update(repo, node, False, False, None)
     _showstats(repo, stats)
     if stats[3]:
         repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
     return stats[3] > 0

 def clean(repo, node, show_stats=True):
     """forcibly switch the working directory to node, clobbering changes"""
     stats = _merge.update(repo, node, False, True, None)
     if show_stats: _showstats(repo, stats)
     return stats[3] > 0

 def merge(repo, node, force=None, remind=True):
     """branch merge with node, resolving changes"""
     stats = _merge.update(repo, node, True, force, False)
     _showstats(repo, stats)
     if stats[3]:
         pl = repo.parents()
         repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
     elif remind:
         repo.ui.status(_("(branch merge, don't forget to commit)\n"))
     return stats[3] > 0

 def revert(repo, node, choose):
     """revert changes to revision in node without updating dirstate"""
     return _merge.update(repo, node, False, True, choose)[3] > 0

 def verify(repo):
     """verify the consistency of a repository"""
     return _verify.verify(repo)