incoming: rename variable...
Nicolas Dumazet
r12729:55f0648c default
@@ -1,557 +1,557 @@
# hg.py - repository classes for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
from lock import release
from node import hex, nullid, nullrev, short
import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
import lock, util, extensions, error, encoding, node
import cmdutil, discovery, url, changegroup
import merge as mergemod
import verify as verifymod
import errno, os, shutil

def _local(path):
    path = util.expandpath(util.drop_scheme('file', path))
    return (os.path.isfile(path) and bundlerepo or localrepo)

def addbranchrevs(lrepo, repo, branches, revs):
    hashbranch, branches = branches
    if not hashbranch and not branches:
        return revs or None, revs and revs[0] or None
    revs = revs and list(revs) or []
    if not repo.capable('branchmap'):
        if branches:
            raise util.Abort(_("remote branch lookup not supported"))
        revs.append(hashbranch)
        return revs, revs[0]
    branchmap = repo.branchmap()

    def primary(butf8):
        if butf8 == '.':
            if not lrepo or not lrepo.local():
                raise util.Abort(_("dirstate branch not accessible"))
            butf8 = lrepo.dirstate.branch()
        if butf8 in branchmap:
            revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
            return True
        else:
            return False

    for branch in branches:
        butf8 = encoding.fromlocal(branch)
        if not primary(butf8):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        butf8 = encoding.fromlocal(hashbranch)
        if not primary(butf8):
            revs.append(hashbranch)
    return revs, revs[0]

def parseurl(url, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''

    if '#' not in url:
        return url, (None, branches or [])
    url, branch = url.split('#', 1)
    return url, (branch, branches or [])

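For reference, a quick sketch of how parseurl() behaves (the URLs are made-up examples, not taken from this change): a '#fragment' is split off as the hash branch, and an explicit branch list is passed through unchanged.

    from mercurial import hg

    # no fragment: hash branch is None, branch list is passed through
    assert hg.parseurl('http://example.com/repo') == \
        ('http://example.com/repo', (None, []))
    # '#stable' is split off and returned as the hash branch
    assert hg.parseurl('http://example.com/repo#stable', ['default']) == \
        ('http://example.com/repo', ('stable', ['default']))
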
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httprepo,
    'https': httprepo,
    'ssh': sshrepo,
    'static-http': statichttprepo,
}

def _lookup(path):
    scheme = 'file'
    if path:
        c = path.find(':')
        if c > 0:
            scheme = path[:c]
    thing = schemes.get(scheme) or schemes['file']
    try:
        return thing(path)
    except TypeError:
        return thing

def islocal(repo):
    '''return true if repo or path is local'''
    if isinstance(repo, str):
        try:
            return _lookup(repo).islocal(repo)
        except AttributeError:
            return False
    return repo.local()

def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    repo = _lookup(path).instance(ui, path, create)
    ui = getattr(repo, "ui", ui)
    for name, module in extensions.extensions():
        hook = getattr(module, 'reposetup', None)
        if hook:
            hook(ui, repo)
    return repo

def defaultdest(source):
    '''return default destination of clone if none is given'''
    return os.path.basename(os.path.normpath(source))

def localpath(path):
    if path.startswith('file://localhost/'):
        return path[16:]
    if path.startswith('file://'):
        return path[7:]
    if path.startswith('file:'):
        return path[5:]
    return path

def share(ui, source, dest=None, update=True):
    '''create a shared repository'''

    if not islocal(source):
        raise util.Abort(_('can only share local repositories'))

    if not dest:
        dest = defaultdest(source)
    else:
        dest = ui.expandpath(dest)

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource)
        srcrepo = repository(ui, source)
        rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
    else:
        srcrepo = source
        origsource = source = srcrepo.url()
        checkout = None

    sharedpath = srcrepo.sharedpath # if our source is already sharing

    root = os.path.realpath(dest)
    roothg = os.path.join(root, '.hg')

    if os.path.exists(roothg):
        raise util.Abort(_('destination already exists'))

    if not os.path.isdir(root):
        os.mkdir(root)
    os.mkdir(roothg)

    requirements = ''
    try:
        requirements = srcrepo.opener('requires').read()
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise

    requirements += 'shared\n'
    file(os.path.join(roothg, 'requires'), 'w').write(requirements)
    file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)

    default = srcrepo.ui.config('paths', 'default')
    if default:
        f = file(os.path.join(roothg, 'hgrc'), 'w')
        f.write('[paths]\ndefault = %s\n' % default)
        f.close()

    r = repository(ui, root)

    if update:
        r.ui.status(_("updating working directory\n"))
        if update is not True:
            checkout = update
        for test in (checkout, 'default', 'tip'):
            if test is None:
                continue
            try:
                uprev = r.lookup(test)
                break
            except error.RepoLookupError:
                continue
        _update(r, uprev)

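As a usage note, share() can also be driven directly from Python; a minimal sketch, assuming a local repository at the hypothetical path /srv/repos/project:

    from mercurial import ui as uimod, hg

    u = uimod.ui()
    # creates project-share/.hg pointing back at the source's store and
    # updates the new working directory (aborts if the destination exists)
    hg.share(u, '/srv/repos/project', dest='project-share', update=True)
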
def clone(ui, source, dest=None, pull=False, rev=None, update=True,
          stream=False, branch=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory. The
    source and destination are URLs, as passed to the repository
    function. Returns a pair of repository objects, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branch = parseurl(origsource, branch)
        src_repo = repository(ui, source)
    else:
        src_repo = source
        branch = (None, branch or [])
        origsource = source = src_repo.url()
    rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)

    if dest is None:
        dest = defaultdest(source)
        ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    dest = localpath(dest)
    source = localpath(source)

    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_("destination '%s' already exists") % dest)
        elif os.listdir(dest):
            raise util.Abort(_("destination '%s' is not empty") % dest)

    class DirCleanup(object):
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
        def close(self):
            self.dir_ = None
        def cleanup(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    src_lock = dest_lock = dir_cleanup = None
    try:
        if islocal(dest):
            dir_cleanup = DirCleanup(dest)

        abspath = origsource
        copy = False
        if src_repo.cancopy() and islocal(dest):
            abspath = os.path.abspath(util.drop_scheme('file', origsource))
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                src_lock = src_repo.lock(wait=False)
            except error.LockError:
                copy = False

        if copy:
            src_repo.hook('preoutgoing', throw=True, source='clone')
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                dir_cleanup.dir_ = hgdir
            try:
                dest_path = hgdir
                os.mkdir(dest_path)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            hardlink = None
            num = 0
            for f in src_repo.store.copylist():
                src = os.path.join(src_repo.sharedpath, f)
                dst = os.path.join(dest_path, f)
                dstbase = os.path.dirname(dst)
                if dstbase and not os.path.exists(dstbase):
                    os.mkdir(dstbase)
                if os.path.exists(src):
                    if dst.endswith('data'):
                        # lock to avoid premature writing to the target
                        dest_lock = lock.lock(os.path.join(dstbase, "lock"))
                    hardlink, n = util.copyfiles(src, dst, hardlink)
                    num += n
            if hardlink:
                ui.debug("linked %d files\n" % num)
            else:
                ui.debug("copied %d files\n" % num)

            # we need to re-init the repo after manually copying the data
            # into it
            dest_repo = repository(ui, dest)
            src_repo.hook('outgoing', source='clone',
                          node=node.hex(node.nullid))
        else:
            try:
                dest_repo = repository(ui, dest, create=True)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            revs = None
            if rev:
                if 'lookup' not in src_repo.capabilities:
                    raise util.Abort(_("src repository does not support "
                                       "revision lookup and so doesn't "
                                       "support clone by revision"))
                revs = [src_repo.lookup(r) for r in rev]
                checkout = revs[0]
            if dest_repo.local():
                dest_repo.clone(src_repo, heads=revs, stream=stream)
            elif src_repo.local():
                src_repo.push(dest_repo, revs=revs)
            else:
                raise util.Abort(_("clone from remote to remote not supported"))

        if dir_cleanup:
            dir_cleanup.close()

        if dest_repo.local():
            fp = dest_repo.opener("hgrc", "w", text=True)
            fp.write("[paths]\n")
            fp.write("default = %s\n" % abspath)
            fp.close()

            dest_repo.ui.setconfig('paths', 'default', abspath)

            if update:
                if update is not True:
                    checkout = update
                    if src_repo.local():
                        checkout = src_repo.lookup(update)
                for test in (checkout, 'default', 'tip'):
                    if test is None:
                        continue
                    try:
                        uprev = dest_repo.lookup(test)
                        break
                    except error.RepoLookupError:
                        continue
                bn = dest_repo[uprev].branch()
                dest_repo.ui.status(_("updating to branch %s\n")
                                    % encoding.tolocal(bn))
                _update(dest_repo, uprev)

        return src_repo, dest_repo
    finally:
        release(src_lock, dest_lock)
        if dir_cleanup is not None:
            dir_cleanup.cleanup()

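To make the docstring above concrete, a minimal sketch of calling clone() programmatically (URL and paths are hypothetical):

    from mercurial import ui as uimod, hg

    u = uimod.ui()
    # returns (source repo, destination repo), writes the [paths] default
    # into the new clone's .hg/hgrc and updates its working directory
    src, dst = hg.clone(u, 'http://example.com/repo', dest='repo-copy')
    # a second, local clone that skips the working directory update
    hg.clone(u, 'repo-copy', dest='repo-copy2', update=False)
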
def _showstats(repo, stats):
    repo.ui.status(_("%d files updated, %d files merged, "
                     "%d files removed, %d files unresolved\n") % stats)

def update(repo, node):
    """update the working directory to node, merging linear changes"""
    stats = mergemod.update(repo, node, False, False, None)
    _showstats(repo, stats)
    if stats[3]:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
    return stats[3] > 0

# naming conflict in clone()
_update = update

def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes"""
    stats = mergemod.update(repo, node, False, True, None)
    if show_stats:
        _showstats(repo, stats)
    return stats[3] > 0

def merge(repo, node, force=None, remind=True):
    """branch merge with node, resolving changes"""
    stats = mergemod.update(repo, node, True, force, False)
    _showstats(repo, stats)
    if stats[3]:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                         "or 'hg update -C .' to abandon\n"))
    elif remind:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return stats[3] > 0

def incoming(ui, repo, source, opts):
    def recurse():
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.incoming(ui, source, opts))
        return ret

    limit = cmdutil.loglimit(opts)
    source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
    other = repository(remoteui(repo, opts), source)
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
    if revs:
        revs = [other.lookup(rev) for rev in revs]

    tmp = discovery.findcommonincoming(repo, other, heads=revs,
                                       force=opts.get('force'))
    common, incoming, rheads = tmp
    if not incoming:
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return recurse()

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)

            if revs is None and other.capable('changegroupsubset'):
                revs = rheads

            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

-       o = other.changelog.nodesbetween(incoming, revs)[0]
+       chlist = other.changelog.nodesbetween(incoming, revs)[0]
        if opts.get('newest_first'):
-           o.reverse()
+           chlist.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
-       for n in o:
+       for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
        displayer.close()
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
    recurse()
    return 0 # exit code is zero since we found incoming changes

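incoming() expects the same opts mapping the command-line layer builds for 'hg incoming'; a hypothetical driver, assuming a 'default' path is configured (only the keys read in the function above matter, and 'bundle' must be present):

    from mercurial import ui as uimod, hg

    u = uimod.ui()
    repo = hg.repository(u, '.')
    opts = {'bundle': '', 'branch': None, 'rev': [], 'force': False,
            'newest_first': False, 'no_merges': False, 'subrepos': False,
            'limit': None}
    # returns 0 if incoming changesets were found, 1 otherwise
    ret = hg.incoming(u, repo, 'default', opts)
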
def outgoing(ui, repo, dest, opts):
    def recurse():
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.outgoing(ui, dest, opts))
        return ret

    limit = cmdutil.loglimit(opts)
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = parseurl(dest, opts.get('branch'))
    revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = repository(remoteui(repo, opts), dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = discovery.findoutgoing(repo, other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return recurse()

    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts.get('newest_first'):
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        if limit is not None and count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])
    displayer.close()
    recurse()
    return 0 # exit code is zero since we found outgoing changes

def revert(repo, node, choose):
    """revert changes to revision in node without updating dirstate"""
    return mergemod.update(repo, node, False, True, choose)[3] > 0

def verify(repo):
    """verify the consistency of a repository"""
    return verifymod.verify(repo)

def remoteui(src, opts):
    'build a remote ui from ui or repo and opts'
    if hasattr(src, 'baseui'): # looks like a repository
        dst = src.baseui.copy() # drop repo-specific config
        src = src.ui # copy target options from repo
    else: # assume it's a global ui object
        dst = src.copy() # keep all global options

    # copy ssh-specific options
    for o in 'ssh', 'remotecmd':
        v = opts.get(o) or src.config('ui', o)
        if v:
            dst.setconfig("ui", o, v)

    # copy bundle-specific options
    r = src.config('bundle', 'mainreporoot')
    if r:
        dst.setconfig('bundle', 'mainreporoot', r)

    # copy auth and http_proxy section settings
    for sect in ('auth', 'http_proxy'):
        for key, val in src.configitems(sect):
            dst.setconfig(sect, key, val)

    return dst
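
remoteui() only copies the settings that matter when talking to a peer; a small sketch with a hypothetical options dict:

    from mercurial import ui as uimod, hg

    u = uimod.ui()
    u.setconfig('ui', 'ssh', 'ssh -C')
    remote = hg.remoteui(u, {'remotecmd': '/usr/local/bin/hg'})
    # ssh comes from the source ui, remotecmd from the options
    assert remote.config('ui', 'ssh') == 'ssh -C'
    assert remote.config('ui', 'remotecmd') == '/usr/local/bin/hg'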