clone: make sure to use "@" as bookmark and "default" as branch (issue3677)...
Thomas Arendsen Hein
r17867:c9339efe stable
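Before the full diff of hg.py below, here is a minimal standalone sketch of the update-target fallback order this changeset introduces in clone(): an explicitly requested checkout first, then the "@" bookmark, then the tip of the "default" branch, then plain tip. It is plain Python with a dict-based stand-in for the repository; the function and parameter names are illustrative only, not Mercurial's API.

# Illustrative only: dict-based stand-in for the repository, not Mercurial's
# internal API. Shows the fallback order used when picking the revision a
# fresh clone updates to.

def pick_update_rev(checkout, bookmarks, branchtips, tip):
    """Return the revision the working directory of a new clone should use."""
    uprev = None
    if checkout is not None:
        uprev = checkout                   # explicit request wins (the real
                                           # code falls back if lookup fails)
    if uprev is None:
        uprev = bookmarks.get('@')         # then the "@" bookmark, if any
    if uprev is None:
        uprev = branchtips.get('default')  # then the head of branch "default"
    if uprev is None:
        uprev = tip                        # finally, plain "tip"
    return uprev

# With an "@" bookmark present it takes precedence over the branch head:
print(pick_update_rev(None, {'@': 'rev-at'}, {'default': 'rev-def'}, 'rev-tip'))
# Without it, the "default" branch head is chosen:
print(pick_update_rev(None, {}, {'default': 'rev-def'}, 'rev-tip'))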
@@ -1,610 +1,616 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 from node import hex, nullid
11 from node import hex, nullid
12 import localrepo, bundlerepo, httppeer, sshpeer, statichttprepo, bookmarks
12 import localrepo, bundlerepo, httppeer, sshpeer, statichttprepo, bookmarks
13 import lock, util, extensions, error, node, scmutil, phases
13 import lock, util, extensions, error, node, scmutil, phases
14 import cmdutil, discovery
14 import cmdutil, discovery
15 import merge as mergemod
15 import merge as mergemod
16 import verify as verifymod
16 import verify as verifymod
17 import errno, os, shutil
17 import errno, os, shutil
18
18
19 def _local(path):
19 def _local(path):
20 path = util.expandpath(util.urllocalpath(path))
20 path = util.expandpath(util.urllocalpath(path))
21 return (os.path.isfile(path) and bundlerepo or localrepo)
21 return (os.path.isfile(path) and bundlerepo or localrepo)
22
22
23 def addbranchrevs(lrepo, other, branches, revs):
23 def addbranchrevs(lrepo, other, branches, revs):
24 peer = other.peer() # a courtesy to callers using a localrepo for other
24 peer = other.peer() # a courtesy to callers using a localrepo for other
25 hashbranch, branches = branches
25 hashbranch, branches = branches
26 if not hashbranch and not branches:
26 if not hashbranch and not branches:
27 return revs or None, revs and revs[0] or None
27 return revs or None, revs and revs[0] or None
28 revs = revs and list(revs) or []
28 revs = revs and list(revs) or []
29 if not peer.capable('branchmap'):
29 if not peer.capable('branchmap'):
30 if branches:
30 if branches:
31 raise util.Abort(_("remote branch lookup not supported"))
31 raise util.Abort(_("remote branch lookup not supported"))
32 revs.append(hashbranch)
32 revs.append(hashbranch)
33 return revs, revs[0]
33 return revs, revs[0]
34 branchmap = peer.branchmap()
34 branchmap = peer.branchmap()
35
35
36 def primary(branch):
36 def primary(branch):
37 if branch == '.':
37 if branch == '.':
38 if not lrepo:
38 if not lrepo:
39 raise util.Abort(_("dirstate branch not accessible"))
39 raise util.Abort(_("dirstate branch not accessible"))
40 branch = lrepo.dirstate.branch()
40 branch = lrepo.dirstate.branch()
41 if branch in branchmap:
41 if branch in branchmap:
42 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
42 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
43 return True
43 return True
44 else:
44 else:
45 return False
45 return False
46
46
47 for branch in branches:
47 for branch in branches:
48 if not primary(branch):
48 if not primary(branch):
49 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
49 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
50 if hashbranch:
50 if hashbranch:
51 if not primary(hashbranch):
51 if not primary(hashbranch):
52 revs.append(hashbranch)
52 revs.append(hashbranch)
53 return revs, revs[0]
53 return revs, revs[0]
54
54
55 def parseurl(path, branches=None):
55 def parseurl(path, branches=None):
56 '''parse url#branch, returning (url, (branch, branches))'''
56 '''parse url#branch, returning (url, (branch, branches))'''
57
57
58 u = util.url(path)
58 u = util.url(path)
59 branch = None
59 branch = None
60 if u.fragment:
60 if u.fragment:
61 branch = u.fragment
61 branch = u.fragment
62 u.fragment = None
62 u.fragment = None
63 return str(u), (branch, branches or [])
63 return str(u), (branch, branches or [])
64
64
65 schemes = {
65 schemes = {
66 'bundle': bundlerepo,
66 'bundle': bundlerepo,
67 'file': _local,
67 'file': _local,
68 'http': httppeer,
68 'http': httppeer,
69 'https': httppeer,
69 'https': httppeer,
70 'ssh': sshpeer,
70 'ssh': sshpeer,
71 'static-http': statichttprepo,
71 'static-http': statichttprepo,
72 }
72 }
73
73
74 def _peerlookup(path):
74 def _peerlookup(path):
75 u = util.url(path)
75 u = util.url(path)
76 scheme = u.scheme or 'file'
76 scheme = u.scheme or 'file'
77 thing = schemes.get(scheme) or schemes['file']
77 thing = schemes.get(scheme) or schemes['file']
78 try:
78 try:
79 return thing(path)
79 return thing(path)
80 except TypeError:
80 except TypeError:
81 return thing
81 return thing
82
82
83 def islocal(repo):
83 def islocal(repo):
84 '''return true if repo or path is local'''
84 '''return true if repo or path is local'''
85 if isinstance(repo, str):
85 if isinstance(repo, str):
86 try:
86 try:
87 return _peerlookup(repo).islocal(repo)
87 return _peerlookup(repo).islocal(repo)
88 except AttributeError:
88 except AttributeError:
89 return False
89 return False
90 return repo.local()
90 return repo.local()
91
91
92 def _peerorrepo(ui, path, create=False):
92 def _peerorrepo(ui, path, create=False):
93 """return a repository object for the specified path"""
93 """return a repository object for the specified path"""
94 obj = _peerlookup(path).instance(ui, path, create)
94 obj = _peerlookup(path).instance(ui, path, create)
95 ui = getattr(obj, "ui", ui)
95 ui = getattr(obj, "ui", ui)
96 for name, module in extensions.extensions():
96 for name, module in extensions.extensions():
97 hook = getattr(module, 'reposetup', None)
97 hook = getattr(module, 'reposetup', None)
98 if hook:
98 if hook:
99 hook(ui, obj)
99 hook(ui, obj)
100 return obj
100 return obj
101
101
102 def repository(ui, path='', create=False):
102 def repository(ui, path='', create=False):
103 """return a repository object for the specified path"""
103 """return a repository object for the specified path"""
104 peer = _peerorrepo(ui, path, create)
104 peer = _peerorrepo(ui, path, create)
105 repo = peer.local()
105 repo = peer.local()
106 if not repo:
106 if not repo:
107 raise util.Abort(_("repository '%s' is not local") %
107 raise util.Abort(_("repository '%s' is not local") %
108 (path or peer.url()))
108 (path or peer.url()))
109 return repo
109 return repo
110
110
111 def peer(uiorrepo, opts, path, create=False):
111 def peer(uiorrepo, opts, path, create=False):
112 '''return a repository peer for the specified path'''
112 '''return a repository peer for the specified path'''
113 rui = remoteui(uiorrepo, opts)
113 rui = remoteui(uiorrepo, opts)
114 return _peerorrepo(rui, path, create).peer()
114 return _peerorrepo(rui, path, create).peer()
115
115
116 def defaultdest(source):
116 def defaultdest(source):
117 '''return default destination of clone if none is given'''
117 '''return default destination of clone if none is given'''
118 return os.path.basename(os.path.normpath(util.url(source).path))
118 return os.path.basename(os.path.normpath(util.url(source).path))
119
119
120 def share(ui, source, dest=None, update=True):
120 def share(ui, source, dest=None, update=True):
121 '''create a shared repository'''
121 '''create a shared repository'''
122
122
123 if not islocal(source):
123 if not islocal(source):
124 raise util.Abort(_('can only share local repositories'))
124 raise util.Abort(_('can only share local repositories'))
125
125
126 if not dest:
126 if not dest:
127 dest = defaultdest(source)
127 dest = defaultdest(source)
128 else:
128 else:
129 dest = ui.expandpath(dest)
129 dest = ui.expandpath(dest)
130
130
131 if isinstance(source, str):
131 if isinstance(source, str):
132 origsource = ui.expandpath(source)
132 origsource = ui.expandpath(source)
133 source, branches = parseurl(origsource)
133 source, branches = parseurl(origsource)
134 srcrepo = repository(ui, source)
134 srcrepo = repository(ui, source)
135 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
135 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
136 else:
136 else:
137 srcrepo = source.local()
137 srcrepo = source.local()
138 origsource = source = srcrepo.url()
138 origsource = source = srcrepo.url()
139 checkout = None
139 checkout = None
140
140
141 sharedpath = srcrepo.sharedpath # if our source is already sharing
141 sharedpath = srcrepo.sharedpath # if our source is already sharing
142
142
143 root = os.path.realpath(dest)
143 root = os.path.realpath(dest)
144 roothg = os.path.join(root, '.hg')
144 roothg = os.path.join(root, '.hg')
145
145
146 if os.path.exists(roothg):
146 if os.path.exists(roothg):
147 raise util.Abort(_('destination already exists'))
147 raise util.Abort(_('destination already exists'))
148
148
149 if not os.path.isdir(root):
149 if not os.path.isdir(root):
150 os.mkdir(root)
150 os.mkdir(root)
151 util.makedir(roothg, notindexed=True)
151 util.makedir(roothg, notindexed=True)
152
152
153 requirements = ''
153 requirements = ''
154 try:
154 try:
155 requirements = srcrepo.opener.read('requires')
155 requirements = srcrepo.opener.read('requires')
156 except IOError, inst:
156 except IOError, inst:
157 if inst.errno != errno.ENOENT:
157 if inst.errno != errno.ENOENT:
158 raise
158 raise
159
159
160 requirements += 'shared\n'
160 requirements += 'shared\n'
161 util.writefile(os.path.join(roothg, 'requires'), requirements)
161 util.writefile(os.path.join(roothg, 'requires'), requirements)
162 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
162 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
163
163
164 r = repository(ui, root)
164 r = repository(ui, root)
165
165
166 default = srcrepo.ui.config('paths', 'default')
166 default = srcrepo.ui.config('paths', 'default')
167 if default:
167 if default:
168 fp = r.opener("hgrc", "w", text=True)
168 fp = r.opener("hgrc", "w", text=True)
169 fp.write("[paths]\n")
169 fp.write("[paths]\n")
170 fp.write("default = %s\n" % default)
170 fp.write("default = %s\n" % default)
171 fp.close()
171 fp.close()
172
172
173 if update:
173 if update:
174 r.ui.status(_("updating working directory\n"))
174 r.ui.status(_("updating working directory\n"))
175 if update is not True:
175 if update is not True:
176 checkout = update
176 checkout = update
177 for test in (checkout, 'default', 'tip'):
177 for test in (checkout, 'default', 'tip'):
178 if test is None:
178 if test is None:
179 continue
179 continue
180 try:
180 try:
181 uprev = r.lookup(test)
181 uprev = r.lookup(test)
182 break
182 break
183 except error.RepoLookupError:
183 except error.RepoLookupError:
184 continue
184 continue
185 _update(r, uprev)
185 _update(r, uprev)
186
186
187 def copystore(ui, srcrepo, destpath):
187 def copystore(ui, srcrepo, destpath):
188 '''copy files from store of srcrepo in destpath
188 '''copy files from store of srcrepo in destpath
189
189
190 returns destlock
190 returns destlock
191 '''
191 '''
192 destlock = None
192 destlock = None
193 try:
193 try:
194 hardlink = None
194 hardlink = None
195 num = 0
195 num = 0
196 srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
196 srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
197 for f in srcrepo.store.copylist():
197 for f in srcrepo.store.copylist():
198 if srcpublishing and f.endswith('phaseroots'):
198 if srcpublishing and f.endswith('phaseroots'):
199 continue
199 continue
200 src = os.path.join(srcrepo.sharedpath, f)
200 src = os.path.join(srcrepo.sharedpath, f)
201 dst = os.path.join(destpath, f)
201 dst = os.path.join(destpath, f)
202 dstbase = os.path.dirname(dst)
202 dstbase = os.path.dirname(dst)
203 if dstbase and not os.path.exists(dstbase):
203 if dstbase and not os.path.exists(dstbase):
204 os.mkdir(dstbase)
204 os.mkdir(dstbase)
205 if os.path.exists(src):
205 if os.path.exists(src):
206 if dst.endswith('data'):
206 if dst.endswith('data'):
207 # lock to avoid premature writing to the target
207 # lock to avoid premature writing to the target
208 destlock = lock.lock(os.path.join(dstbase, "lock"))
208 destlock = lock.lock(os.path.join(dstbase, "lock"))
209 hardlink, n = util.copyfiles(src, dst, hardlink)
209 hardlink, n = util.copyfiles(src, dst, hardlink)
210 num += n
210 num += n
211 if hardlink:
211 if hardlink:
212 ui.debug("linked %d files\n" % num)
212 ui.debug("linked %d files\n" % num)
213 else:
213 else:
214 ui.debug("copied %d files\n" % num)
214 ui.debug("copied %d files\n" % num)
215 return destlock
215 return destlock
216 except: # re-raises
216 except: # re-raises
217 release(destlock)
217 release(destlock)
218 raise
218 raise
219
219
220 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
220 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
221 update=True, stream=False, branch=None):
221 update=True, stream=False, branch=None):
222 """Make a copy of an existing repository.
222 """Make a copy of an existing repository.
223
223
224 Create a copy of an existing repository in a new directory. The
224 Create a copy of an existing repository in a new directory. The
225 source and destination are URLs, as passed to the repository
225 source and destination are URLs, as passed to the repository
226 function. Returns a pair of repository peers, the source and
226 function. Returns a pair of repository peers, the source and
227 newly created destination.
227 newly created destination.
228
228
229 The location of the source is added to the new repository's
229 The location of the source is added to the new repository's
230 .hg/hgrc file, as the default to be used for future pulls and
230 .hg/hgrc file, as the default to be used for future pulls and
231 pushes.
231 pushes.
232
232
233 If an exception is raised, the partly cloned/updated destination
233 If an exception is raised, the partly cloned/updated destination
234 repository will be deleted.
234 repository will be deleted.
235
235
236 Arguments:
236 Arguments:
237
237
238 source: repository object or URL
238 source: repository object or URL
239
239
240 dest: URL of destination repository to create (defaults to base
240 dest: URL of destination repository to create (defaults to base
241 name of source repository)
241 name of source repository)
242
242
243 pull: always pull from source repository, even in local case
243 pull: always pull from source repository, even in local case
244
244
245 stream: stream raw data uncompressed from repository (fast over
245 stream: stream raw data uncompressed from repository (fast over
246 LAN, slow over WAN)
246 LAN, slow over WAN)
247
247
248 rev: revision to clone up to (implies pull=True)
248 rev: revision to clone up to (implies pull=True)
249
249
250 update: update working directory after clone completes, if
250 update: update working directory after clone completes, if
251 destination is local repository (True means update to default rev,
251 destination is local repository (True means update to default rev,
252 anything else is treated as a revision)
252 anything else is treated as a revision)
253
253
254 branch: branches to clone
254 branch: branches to clone
255 """
255 """
256
256
257 if isinstance(source, str):
257 if isinstance(source, str):
258 origsource = ui.expandpath(source)
258 origsource = ui.expandpath(source)
259 source, branch = parseurl(origsource, branch)
259 source, branch = parseurl(origsource, branch)
260 srcpeer = peer(ui, peeropts, source)
260 srcpeer = peer(ui, peeropts, source)
261 else:
261 else:
262 srcpeer = source.peer() # in case we were called with a localrepo
262 srcpeer = source.peer() # in case we were called with a localrepo
263 branch = (None, branch or [])
263 branch = (None, branch or [])
264 origsource = source = srcpeer.url()
264 origsource = source = srcpeer.url()
265 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
265 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
266
266
267 if dest is None:
267 if dest is None:
268 dest = defaultdest(source)
268 dest = defaultdest(source)
269 ui.status(_("destination directory: %s\n") % dest)
269 ui.status(_("destination directory: %s\n") % dest)
270 else:
270 else:
271 dest = ui.expandpath(dest)
271 dest = ui.expandpath(dest)
272
272
273 dest = util.urllocalpath(dest)
273 dest = util.urllocalpath(dest)
274 source = util.urllocalpath(source)
274 source = util.urllocalpath(source)
275
275
276 if not dest:
276 if not dest:
277 raise util.Abort(_("empty destination path is not valid"))
277 raise util.Abort(_("empty destination path is not valid"))
278 if os.path.exists(dest):
278 if os.path.exists(dest):
279 if not os.path.isdir(dest):
279 if not os.path.isdir(dest):
280 raise util.Abort(_("destination '%s' already exists") % dest)
280 raise util.Abort(_("destination '%s' already exists") % dest)
281 elif os.listdir(dest):
281 elif os.listdir(dest):
282 raise util.Abort(_("destination '%s' is not empty") % dest)
282 raise util.Abort(_("destination '%s' is not empty") % dest)
283
283
284 class DirCleanup(object):
284 class DirCleanup(object):
285 def __init__(self, dir_):
285 def __init__(self, dir_):
286 self.rmtree = shutil.rmtree
286 self.rmtree = shutil.rmtree
287 self.dir_ = dir_
287 self.dir_ = dir_
288 def close(self):
288 def close(self):
289 self.dir_ = None
289 self.dir_ = None
290 def cleanup(self):
290 def cleanup(self):
291 if self.dir_:
291 if self.dir_:
292 self.rmtree(self.dir_, True)
292 self.rmtree(self.dir_, True)
293
293
294 srclock = destlock = dircleanup = None
294 srclock = destlock = dircleanup = None
295 srcrepo = srcpeer.local()
295 srcrepo = srcpeer.local()
296 try:
296 try:
297 abspath = origsource
297 abspath = origsource
298 if islocal(origsource):
298 if islocal(origsource):
299 abspath = os.path.abspath(util.urllocalpath(origsource))
299 abspath = os.path.abspath(util.urllocalpath(origsource))
300
300
301 if islocal(dest):
301 if islocal(dest):
302 dircleanup = DirCleanup(dest)
302 dircleanup = DirCleanup(dest)
303
303
304 copy = False
304 copy = False
305 if (srcrepo and srcrepo.cancopy() and islocal(dest)
305 if (srcrepo and srcrepo.cancopy() and islocal(dest)
306 and not phases.hassecret(srcrepo)):
306 and not phases.hassecret(srcrepo)):
307 copy = not pull and not rev
307 copy = not pull and not rev
308
308
309 if copy:
309 if copy:
310 try:
310 try:
311 # we use a lock here because if we race with commit, we
311 # we use a lock here because if we race with commit, we
312 # can end up with extra data in the cloned revlogs that's
312 # can end up with extra data in the cloned revlogs that's
313 # not pointed to by changesets, thus causing verify to
313 # not pointed to by changesets, thus causing verify to
314 # fail
314 # fail
315 srclock = srcrepo.lock(wait=False)
315 srclock = srcrepo.lock(wait=False)
316 except error.LockError:
316 except error.LockError:
317 copy = False
317 copy = False
318
318
319 if copy:
319 if copy:
320 srcrepo.hook('preoutgoing', throw=True, source='clone')
320 srcrepo.hook('preoutgoing', throw=True, source='clone')
321 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
321 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
322 if not os.path.exists(dest):
322 if not os.path.exists(dest):
323 os.mkdir(dest)
323 os.mkdir(dest)
324 else:
324 else:
325 # only clean up directories we create ourselves
325 # only clean up directories we create ourselves
326 dircleanup.dir_ = hgdir
326 dircleanup.dir_ = hgdir
327 try:
327 try:
328 destpath = hgdir
328 destpath = hgdir
329 util.makedir(destpath, notindexed=True)
329 util.makedir(destpath, notindexed=True)
330 except OSError, inst:
330 except OSError, inst:
331 if inst.errno == errno.EEXIST:
331 if inst.errno == errno.EEXIST:
332 dircleanup.close()
332 dircleanup.close()
333 raise util.Abort(_("destination '%s' already exists")
333 raise util.Abort(_("destination '%s' already exists")
334 % dest)
334 % dest)
335 raise
335 raise
336
336
337 destlock = copystore(ui, srcrepo, destpath)
337 destlock = copystore(ui, srcrepo, destpath)
338
338
339 # Recomputing branch cache might be slow on big repos,
339 # Recomputing branch cache might be slow on big repos,
340 # so just copy it
340 # so just copy it
341 dstcachedir = os.path.join(destpath, 'cache')
341 dstcachedir = os.path.join(destpath, 'cache')
342 srcbranchcache = srcrepo.sjoin('cache/branchheads')
342 srcbranchcache = srcrepo.sjoin('cache/branchheads')
343 dstbranchcache = os.path.join(dstcachedir, 'branchheads')
343 dstbranchcache = os.path.join(dstcachedir, 'branchheads')
344 if os.path.exists(srcbranchcache):
344 if os.path.exists(srcbranchcache):
345 if not os.path.exists(dstcachedir):
345 if not os.path.exists(dstcachedir):
346 os.mkdir(dstcachedir)
346 os.mkdir(dstcachedir)
347 util.copyfile(srcbranchcache, dstbranchcache)
347 util.copyfile(srcbranchcache, dstbranchcache)
348
348
349 # we need to re-init the repo after manually copying the data
349 # we need to re-init the repo after manually copying the data
350 # into it
350 # into it
351 destpeer = peer(ui, peeropts, dest)
351 destpeer = peer(ui, peeropts, dest)
352 srcrepo.hook('outgoing', source='clone',
352 srcrepo.hook('outgoing', source='clone',
353 node=node.hex(node.nullid))
353 node=node.hex(node.nullid))
354 else:
354 else:
355 try:
355 try:
356 destpeer = peer(ui, peeropts, dest, create=True)
356 destpeer = peer(ui, peeropts, dest, create=True)
357 except OSError, inst:
357 except OSError, inst:
358 if inst.errno == errno.EEXIST:
358 if inst.errno == errno.EEXIST:
359 dircleanup.close()
359 dircleanup.close()
360 raise util.Abort(_("destination '%s' already exists")
360 raise util.Abort(_("destination '%s' already exists")
361 % dest)
361 % dest)
362 raise
362 raise
363
363
364 revs = None
364 revs = None
365 if rev:
365 if rev:
366 if not srcpeer.capable('lookup'):
366 if not srcpeer.capable('lookup'):
367 raise util.Abort(_("src repository does not support "
367 raise util.Abort(_("src repository does not support "
368 "revision lookup and so doesn't "
368 "revision lookup and so doesn't "
369 "support clone by revision"))
369 "support clone by revision"))
370 revs = [srcpeer.lookup(r) for r in rev]
370 revs = [srcpeer.lookup(r) for r in rev]
371 checkout = revs[0]
371 checkout = revs[0]
372 if destpeer.local():
372 if destpeer.local():
373 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
373 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
374 elif srcrepo:
374 elif srcrepo:
375 srcrepo.push(destpeer, revs=revs)
375 srcrepo.push(destpeer, revs=revs)
376 else:
376 else:
377 raise util.Abort(_("clone from remote to remote not supported"))
377 raise util.Abort(_("clone from remote to remote not supported"))
378
378
379 if dircleanup:
379 if dircleanup:
380 dircleanup.close()
380 dircleanup.close()
381
381
382 # clone all bookmarks except divergent ones
382 # clone all bookmarks except divergent ones
383 destrepo = destpeer.local()
383 destrepo = destpeer.local()
384 if destrepo and srcpeer.capable("pushkey"):
384 if destrepo and srcpeer.capable("pushkey"):
385 rb = srcpeer.listkeys('bookmarks')
385 rb = srcpeer.listkeys('bookmarks')
386 for k, n in rb.iteritems():
386 for k, n in rb.iteritems():
387 try:
387 try:
388 m = destrepo.lookup(n)
388 m = destrepo.lookup(n)
389 destrepo._bookmarks[k] = m
389 destrepo._bookmarks[k] = m
390 except error.RepoLookupError:
390 except error.RepoLookupError:
391 pass
391 pass
392 if rb:
392 if rb:
393 bookmarks.write(destrepo)
393 bookmarks.write(destrepo)
394 elif srcrepo and destpeer.capable("pushkey"):
394 elif srcrepo and destpeer.capable("pushkey"):
395 for k, n in srcrepo._bookmarks.iteritems():
395 for k, n in srcrepo._bookmarks.iteritems():
396 destpeer.pushkey('bookmarks', k, '', hex(n))
396 destpeer.pushkey('bookmarks', k, '', hex(n))
397
397
398 if destrepo:
398 if destrepo:
399 fp = destrepo.opener("hgrc", "w", text=True)
399 fp = destrepo.opener("hgrc", "w", text=True)
400 fp.write("[paths]\n")
400 fp.write("[paths]\n")
401 u = util.url(abspath)
401 u = util.url(abspath)
402 u.passwd = None
402 u.passwd = None
403 defaulturl = str(u)
403 defaulturl = str(u)
404 fp.write("default = %s\n" % defaulturl)
404 fp.write("default = %s\n" % defaulturl)
405 fp.close()
405 fp.close()
406
406
407 destrepo.ui.setconfig('paths', 'default', defaulturl)
407 destrepo.ui.setconfig('paths', 'default', defaulturl)
408
408
         if update:
             if update is not True:
                 checkout = srcpeer.lookup(update)
-            for test in (checkout, '@', 'default', 'tip'):
-                if test is None:
-                    continue
-                try:
-                    uprev = destrepo.lookup(test)
-                    break
-                except error.RepoLookupError:
-                    continue
+            uprev = None
+            if checkout is not None:
+                try:
+                    uprev = destrepo.lookup(checkout)
+                except error.RepoLookupError:
+                    pass
+            if uprev is None:
+                try:
+                    uprev = destrepo._bookmarks['@']
+                except KeyError:
+                    try:
+                        uprev = destrepo.branchtip('default')
+                    except error.RepoLookupError:
+                        uprev = destrepo.lookup('tip')
             bn = destrepo[uprev].branch()
             destrepo.ui.status(_("updating to branch %s\n") % bn)
             _update(destrepo, uprev)
             if update in destrepo._bookmarks:
                 bookmarks.setcurrent(destrepo, update)

426 return srcpeer, destpeer
432 return srcpeer, destpeer
427 finally:
433 finally:
428 release(srclock, destlock)
434 release(srclock, destlock)
429 if dircleanup is not None:
435 if dircleanup is not None:
430 dircleanup.cleanup()
436 dircleanup.cleanup()
431 if srcpeer is not None:
437 if srcpeer is not None:
432 srcpeer.close()
438 srcpeer.close()
433
439
434 def _showstats(repo, stats):
440 def _showstats(repo, stats):
435 repo.ui.status(_("%d files updated, %d files merged, "
441 repo.ui.status(_("%d files updated, %d files merged, "
436 "%d files removed, %d files unresolved\n") % stats)
442 "%d files removed, %d files unresolved\n") % stats)
437
443
438 def update(repo, node):
444 def update(repo, node):
439 """update the working directory to node, merging linear changes"""
445 """update the working directory to node, merging linear changes"""
440 stats = mergemod.update(repo, node, False, False, None)
446 stats = mergemod.update(repo, node, False, False, None)
441 _showstats(repo, stats)
447 _showstats(repo, stats)
442 if stats[3]:
448 if stats[3]:
443 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
449 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
444 return stats[3] > 0
450 return stats[3] > 0
445
451
446 # naming conflict in clone()
452 # naming conflict in clone()
447 _update = update
453 _update = update
448
454
449 def clean(repo, node, show_stats=True):
455 def clean(repo, node, show_stats=True):
450 """forcibly switch the working directory to node, clobbering changes"""
456 """forcibly switch the working directory to node, clobbering changes"""
451 stats = mergemod.update(repo, node, False, True, None)
457 stats = mergemod.update(repo, node, False, True, None)
452 if show_stats:
458 if show_stats:
453 _showstats(repo, stats)
459 _showstats(repo, stats)
454 return stats[3] > 0
460 return stats[3] > 0
455
461
456 def merge(repo, node, force=None, remind=True):
462 def merge(repo, node, force=None, remind=True):
457 """Branch merge with node, resolving changes. Return true if any
463 """Branch merge with node, resolving changes. Return true if any
458 unresolved conflicts."""
464 unresolved conflicts."""
459 stats = mergemod.update(repo, node, True, force, False)
465 stats = mergemod.update(repo, node, True, force, False)
460 _showstats(repo, stats)
466 _showstats(repo, stats)
461 if stats[3]:
467 if stats[3]:
462 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
468 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
463 "or 'hg update -C .' to abandon\n"))
469 "or 'hg update -C .' to abandon\n"))
464 elif remind:
470 elif remind:
465 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
471 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
466 return stats[3] > 0
472 return stats[3] > 0
467
473
468 def _incoming(displaychlist, subreporecurse, ui, repo, source,
474 def _incoming(displaychlist, subreporecurse, ui, repo, source,
469 opts, buffered=False):
475 opts, buffered=False):
470 """
476 """
471 Helper for incoming / gincoming.
477 Helper for incoming / gincoming.
472 displaychlist gets called with
478 displaychlist gets called with
473 (remoterepo, incomingchangesetlist, displayer) parameters,
479 (remoterepo, incomingchangesetlist, displayer) parameters,
474 and is supposed to contain only code that can't be unified.
480 and is supposed to contain only code that can't be unified.
475 """
481 """
476 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
482 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
477 other = peer(repo, opts, source)
483 other = peer(repo, opts, source)
478 ui.status(_('comparing with %s\n') % util.hidepassword(source))
484 ui.status(_('comparing with %s\n') % util.hidepassword(source))
479 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
485 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
480
486
481 if revs:
487 if revs:
482 revs = [other.lookup(rev) for rev in revs]
488 revs = [other.lookup(rev) for rev in revs]
483 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
489 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
484 revs, opts["bundle"], opts["force"])
490 revs, opts["bundle"], opts["force"])
485 try:
491 try:
486 if not chlist:
492 if not chlist:
487 ui.status(_("no changes found\n"))
493 ui.status(_("no changes found\n"))
488 return subreporecurse()
494 return subreporecurse()
489
495
490 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
496 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
491
497
492 # XXX once graphlog extension makes it into core,
498 # XXX once graphlog extension makes it into core,
493 # should be replaced by a if graph/else
499 # should be replaced by a if graph/else
494 displaychlist(other, chlist, displayer)
500 displaychlist(other, chlist, displayer)
495
501
496 displayer.close()
502 displayer.close()
497 finally:
503 finally:
498 cleanupfn()
504 cleanupfn()
499 subreporecurse()
505 subreporecurse()
500 return 0 # exit code is zero since we found incoming changes
506 return 0 # exit code is zero since we found incoming changes
501
507
502 def incoming(ui, repo, source, opts):
508 def incoming(ui, repo, source, opts):
503 def subreporecurse():
509 def subreporecurse():
504 ret = 1
510 ret = 1
505 if opts.get('subrepos'):
511 if opts.get('subrepos'):
506 ctx = repo[None]
512 ctx = repo[None]
507 for subpath in sorted(ctx.substate):
513 for subpath in sorted(ctx.substate):
508 sub = ctx.sub(subpath)
514 sub = ctx.sub(subpath)
509 ret = min(ret, sub.incoming(ui, source, opts))
515 ret = min(ret, sub.incoming(ui, source, opts))
510 return ret
516 return ret
511
517
512 def display(other, chlist, displayer):
518 def display(other, chlist, displayer):
513 limit = cmdutil.loglimit(opts)
519 limit = cmdutil.loglimit(opts)
514 if opts.get('newest_first'):
520 if opts.get('newest_first'):
515 chlist.reverse()
521 chlist.reverse()
516 count = 0
522 count = 0
517 for n in chlist:
523 for n in chlist:
518 if limit is not None and count >= limit:
524 if limit is not None and count >= limit:
519 break
525 break
520 parents = [p for p in other.changelog.parents(n) if p != nullid]
526 parents = [p for p in other.changelog.parents(n) if p != nullid]
521 if opts.get('no_merges') and len(parents) == 2:
527 if opts.get('no_merges') and len(parents) == 2:
522 continue
528 continue
523 count += 1
529 count += 1
524 displayer.show(other[n])
530 displayer.show(other[n])
525 return _incoming(display, subreporecurse, ui, repo, source, opts)
531 return _incoming(display, subreporecurse, ui, repo, source, opts)
526
532
527 def _outgoing(ui, repo, dest, opts):
533 def _outgoing(ui, repo, dest, opts):
528 dest = ui.expandpath(dest or 'default-push', dest or 'default')
534 dest = ui.expandpath(dest or 'default-push', dest or 'default')
529 dest, branches = parseurl(dest, opts.get('branch'))
535 dest, branches = parseurl(dest, opts.get('branch'))
530 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
536 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
531 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
537 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
532 if revs:
538 if revs:
533 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
539 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
534
540
535 other = peer(repo, opts, dest)
541 other = peer(repo, opts, dest)
536 outgoing = discovery.findcommonoutgoing(repo, other, revs,
542 outgoing = discovery.findcommonoutgoing(repo, other, revs,
537 force=opts.get('force'))
543 force=opts.get('force'))
538 o = outgoing.missing
544 o = outgoing.missing
539 if not o:
545 if not o:
540 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
546 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
541 return None
547 return None
542 return o
548 return o
543
549
544 def outgoing(ui, repo, dest, opts):
550 def outgoing(ui, repo, dest, opts):
545 def recurse():
551 def recurse():
546 ret = 1
552 ret = 1
547 if opts.get('subrepos'):
553 if opts.get('subrepos'):
548 ctx = repo[None]
554 ctx = repo[None]
549 for subpath in sorted(ctx.substate):
555 for subpath in sorted(ctx.substate):
550 sub = ctx.sub(subpath)
556 sub = ctx.sub(subpath)
551 ret = min(ret, sub.outgoing(ui, dest, opts))
557 ret = min(ret, sub.outgoing(ui, dest, opts))
552 return ret
558 return ret
553
559
554 limit = cmdutil.loglimit(opts)
560 limit = cmdutil.loglimit(opts)
555 o = _outgoing(ui, repo, dest, opts)
561 o = _outgoing(ui, repo, dest, opts)
556 if o is None:
562 if o is None:
557 return recurse()
563 return recurse()
558
564
559 if opts.get('newest_first'):
565 if opts.get('newest_first'):
560 o.reverse()
566 o.reverse()
561 displayer = cmdutil.show_changeset(ui, repo, opts)
567 displayer = cmdutil.show_changeset(ui, repo, opts)
562 count = 0
568 count = 0
563 for n in o:
569 for n in o:
564 if limit is not None and count >= limit:
570 if limit is not None and count >= limit:
565 break
571 break
566 parents = [p for p in repo.changelog.parents(n) if p != nullid]
572 parents = [p for p in repo.changelog.parents(n) if p != nullid]
567 if opts.get('no_merges') and len(parents) == 2:
573 if opts.get('no_merges') and len(parents) == 2:
568 continue
574 continue
569 count += 1
575 count += 1
570 displayer.show(repo[n])
576 displayer.show(repo[n])
571 displayer.close()
577 displayer.close()
572 recurse()
578 recurse()
573 return 0 # exit code is zero since we found outgoing changes
579 return 0 # exit code is zero since we found outgoing changes
574
580
575 def revert(repo, node, choose):
581 def revert(repo, node, choose):
576 """revert changes to revision in node without updating dirstate"""
582 """revert changes to revision in node without updating dirstate"""
577 return mergemod.update(repo, node, False, True, choose)[3] > 0
583 return mergemod.update(repo, node, False, True, choose)[3] > 0
578
584
579 def verify(repo):
585 def verify(repo):
580 """verify the consistency of a repository"""
586 """verify the consistency of a repository"""
581 return verifymod.verify(repo)
587 return verifymod.verify(repo)
582
588
583 def remoteui(src, opts):
589 def remoteui(src, opts):
584 'build a remote ui from ui or repo and opts'
590 'build a remote ui from ui or repo and opts'
585 if util.safehasattr(src, 'baseui'): # looks like a repository
591 if util.safehasattr(src, 'baseui'): # looks like a repository
586 dst = src.baseui.copy() # drop repo-specific config
592 dst = src.baseui.copy() # drop repo-specific config
587 src = src.ui # copy target options from repo
593 src = src.ui # copy target options from repo
588 else: # assume it's a global ui object
594 else: # assume it's a global ui object
589 dst = src.copy() # keep all global options
595 dst = src.copy() # keep all global options
590
596
591 # copy ssh-specific options
597 # copy ssh-specific options
592 for o in 'ssh', 'remotecmd':
598 for o in 'ssh', 'remotecmd':
593 v = opts.get(o) or src.config('ui', o)
599 v = opts.get(o) or src.config('ui', o)
594 if v:
600 if v:
595 dst.setconfig("ui", o, v)
601 dst.setconfig("ui", o, v)
596
602
597 # copy bundle-specific options
603 # copy bundle-specific options
598 r = src.config('bundle', 'mainreporoot')
604 r = src.config('bundle', 'mainreporoot')
599 if r:
605 if r:
600 dst.setconfig('bundle', 'mainreporoot', r)
606 dst.setconfig('bundle', 'mainreporoot', r)
601
607
602 # copy selected local settings to the remote ui
608 # copy selected local settings to the remote ui
603 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
609 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
604 for key, val in src.configitems(sect):
610 for key, val in src.configitems(sect):
605 dst.setconfig(sect, key, val)
611 dst.setconfig(sect, key, val)
606 v = src.config('web', 'cacerts')
612 v = src.config('web', 'cacerts')
607 if v:
613 if v:
608 dst.setconfig('web', 'cacerts', util.expandpath(v))
614 dst.setconfig('web', 'cacerts', util.expandpath(v))
609
615
610 return dst
616 return dst
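The remoteui() helper above keeps all global options and copies across only a whitelist of connection-relevant settings for the peer. A rough standalone illustration of that filtering, using plain dicts keyed by (section, name) instead of Mercurial's ui class (the names here are hypothetical):

# Rough stand-in for remoteui(): keep all global options, then copy across
# only ssh/remotecmd overrides and a few whitelisted sections. Plain dicts
# keyed by (section, name); not Mercurial's ui API.

def build_remote_config(globalcfg, localcfg, opts):
    dst = dict(globalcfg)                          # keep all global options
    for name in ('ssh', 'remotecmd'):              # ssh-specific options
        val = opts.get(name) or localcfg.get(('ui', name))
        if val:
            dst[('ui', name)] = val
    for (section, key), val in localcfg.items():   # selected local settings
        if section in ('auth', 'hostfingerprints', 'http_proxy'):
            dst[(section, key)] = val
    return dst

print(build_remote_config({('ui', 'username'): 'me'},
                          {('auth', 'x.prefix'): 'example.com',
                           ('phases', 'publish'): 'False'},
                          {'ssh': 'ssh -C'}))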
@@ -1,352 +1,350 @@
1 $ "$TESTDIR/hghave" hardlink || exit 80
1 $ "$TESTDIR/hghave" hardlink || exit 80
2
2
3 $ cat > nlinks.py <<EOF
3 $ cat > nlinks.py <<EOF
4 > import sys
4 > import sys
5 > from mercurial import util
5 > from mercurial import util
6 > for f in sorted(sys.stdin.readlines()):
6 > for f in sorted(sys.stdin.readlines()):
7 > f = f[:-1]
7 > f = f[:-1]
8 > print util.nlinks(f), f
8 > print util.nlinks(f), f
9 > EOF
9 > EOF
10
10
11 $ nlinksdir()
11 $ nlinksdir()
12 > {
12 > {
13 > find $1 -type f | python $TESTTMP/nlinks.py
13 > find $1 -type f | python $TESTTMP/nlinks.py
14 > }
14 > }
15
15
16 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
16 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
17
17
18 $ cat > linkcp.py <<EOF
18 $ cat > linkcp.py <<EOF
19 > from mercurial import util
19 > from mercurial import util
20 > import sys
20 > import sys
21 > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
21 > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
22 > EOF
22 > EOF
23
23
24 $ linkcp()
24 $ linkcp()
25 > {
25 > {
26 > python $TESTTMP/linkcp.py $1 $2
26 > python $TESTTMP/linkcp.py $1 $2
27 > }
27 > }
28
28
29 Prepare repo r1:
29 Prepare repo r1:
30
30
31 $ hg init r1
31 $ hg init r1
32 $ cd r1
32 $ cd r1
33
33
34 $ echo c1 > f1
34 $ echo c1 > f1
35 $ hg add f1
35 $ hg add f1
36 $ hg ci -m0
36 $ hg ci -m0
37
37
38 $ mkdir d1
38 $ mkdir d1
39 $ cd d1
39 $ cd d1
40 $ echo c2 > f2
40 $ echo c2 > f2
41 $ hg add f2
41 $ hg add f2
42 $ hg ci -m1
42 $ hg ci -m1
43 $ cd ../..
43 $ cd ../..
44
44
45 $ nlinksdir r1/.hg/store
45 $ nlinksdir r1/.hg/store
46 1 r1/.hg/store/00changelog.i
46 1 r1/.hg/store/00changelog.i
47 1 r1/.hg/store/00manifest.i
47 1 r1/.hg/store/00manifest.i
48 1 r1/.hg/store/data/d1/f2.i
48 1 r1/.hg/store/data/d1/f2.i
49 1 r1/.hg/store/data/f1.i
49 1 r1/.hg/store/data/f1.i
50 1 r1/.hg/store/fncache
50 1 r1/.hg/store/fncache
51 1 r1/.hg/store/phaseroots
51 1 r1/.hg/store/phaseroots
52 1 r1/.hg/store/undo
52 1 r1/.hg/store/undo
53 1 r1/.hg/store/undo.phaseroots
53 1 r1/.hg/store/undo.phaseroots
54
54
55
55
56 Create hardlinked clone r2:
56 Create hardlinked clone r2:
57
57
58 $ hg clone -U --debug r1 r2
58 $ hg clone -U --debug r1 r2
59 linked 7 files
59 linked 7 files
60 listing keys for "bookmarks"
60 listing keys for "bookmarks"
61
61
62 Create non-hardlinked clone r3:
62 Create non-hardlinked clone r3:
63
63
64 $ hg clone --pull r1 r3
64 $ hg clone --pull r1 r3
65 requesting all changes
65 requesting all changes
66 adding changesets
66 adding changesets
67 adding manifests
67 adding manifests
68 adding file changes
68 adding file changes
69 added 2 changesets with 2 changes to 2 files
69 added 2 changesets with 2 changes to 2 files
70 updating to branch default
70 updating to branch default
71 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
71 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
72
72
73
73
74 Repos r1 and r2 should now contain hardlinked files:
74 Repos r1 and r2 should now contain hardlinked files:
75
75
76 $ nlinksdir r1/.hg/store
76 $ nlinksdir r1/.hg/store
77 2 r1/.hg/store/00changelog.i
77 2 r1/.hg/store/00changelog.i
78 2 r1/.hg/store/00manifest.i
78 2 r1/.hg/store/00manifest.i
79 2 r1/.hg/store/data/d1/f2.i
79 2 r1/.hg/store/data/d1/f2.i
80 2 r1/.hg/store/data/f1.i
80 2 r1/.hg/store/data/f1.i
81 2 r1/.hg/store/fncache
81 2 r1/.hg/store/fncache
82 1 r1/.hg/store/phaseroots
82 1 r1/.hg/store/phaseroots
83 1 r1/.hg/store/undo
83 1 r1/.hg/store/undo
84 1 r1/.hg/store/undo.phaseroots
84 1 r1/.hg/store/undo.phaseroots
85
85
86 $ nlinksdir r2/.hg/store
86 $ nlinksdir r2/.hg/store
87 2 r2/.hg/store/00changelog.i
87 2 r2/.hg/store/00changelog.i
88 2 r2/.hg/store/00manifest.i
88 2 r2/.hg/store/00manifest.i
89 2 r2/.hg/store/data/d1/f2.i
89 2 r2/.hg/store/data/d1/f2.i
90 2 r2/.hg/store/data/f1.i
90 2 r2/.hg/store/data/f1.i
91 2 r2/.hg/store/fncache
91 2 r2/.hg/store/fncache
92
92
93 Repo r3 should not be hardlinked:
93 Repo r3 should not be hardlinked:
94
94
95 $ nlinksdir r3/.hg/store
95 $ nlinksdir r3/.hg/store
96 1 r3/.hg/store/00changelog.i
96 1 r3/.hg/store/00changelog.i
97 1 r3/.hg/store/00manifest.i
97 1 r3/.hg/store/00manifest.i
98 1 r3/.hg/store/data/d1/f2.i
98 1 r3/.hg/store/data/d1/f2.i
99 1 r3/.hg/store/data/f1.i
99 1 r3/.hg/store/data/f1.i
100 1 r3/.hg/store/fncache
100 1 r3/.hg/store/fncache
101 1 r3/.hg/store/phaseroots
101 1 r3/.hg/store/phaseroots
102 1 r3/.hg/store/undo
102 1 r3/.hg/store/undo
103 1 r3/.hg/store/undo.phaseroots
103 1 r3/.hg/store/undo.phaseroots
104
104
105
105
106 Create a non-inlined filelog in r3:
106 Create a non-inlined filelog in r3:
107
107
108 $ cd r3/d1
108 $ cd r3/d1
109 >>> f = open('data1', 'wb')
109 >>> f = open('data1', 'wb')
110 >>> for x in range(10000):
110 >>> for x in range(10000):
111 ... f.write("%s\n" % str(x))
111 ... f.write("%s\n" % str(x))
112 >>> f.close()
112 >>> f.close()
113 $ for j in 0 1 2 3 4 5 6 7 8 9; do
113 $ for j in 0 1 2 3 4 5 6 7 8 9; do
114 > cat data1 >> f2
114 > cat data1 >> f2
115 > hg commit -m$j
115 > hg commit -m$j
116 > done
116 > done
117 $ cd ../..
117 $ cd ../..
118
118
119 $ nlinksdir r3/.hg/store
119 $ nlinksdir r3/.hg/store
120 1 r3/.hg/store/00changelog.i
120 1 r3/.hg/store/00changelog.i
121 1 r3/.hg/store/00manifest.i
121 1 r3/.hg/store/00manifest.i
122 1 r3/.hg/store/data/d1/f2.d
122 1 r3/.hg/store/data/d1/f2.d
123 1 r3/.hg/store/data/d1/f2.i
123 1 r3/.hg/store/data/d1/f2.i
124 1 r3/.hg/store/data/f1.i
124 1 r3/.hg/store/data/f1.i
125 1 r3/.hg/store/fncache
125 1 r3/.hg/store/fncache
126 1 r3/.hg/store/phaseroots
126 1 r3/.hg/store/phaseroots
127 1 r3/.hg/store/undo
127 1 r3/.hg/store/undo
128 1 r3/.hg/store/undo.phaseroots
128 1 r3/.hg/store/undo.phaseroots
129
129
130 Push to repo r1 should break up most hardlinks in r2:
130 Push to repo r1 should break up most hardlinks in r2:
131
131
132 $ hg -R r2 verify
132 $ hg -R r2 verify
133 checking changesets
133 checking changesets
134 checking manifests
134 checking manifests
135 crosschecking files in changesets and manifests
135 crosschecking files in changesets and manifests
136 checking files
136 checking files
137 2 files, 2 changesets, 2 total revisions
137 2 files, 2 changesets, 2 total revisions
138
138
139 $ cd r3
139 $ cd r3
140 $ hg push
140 $ hg push
141 pushing to $TESTTMP/r1 (glob)
141 pushing to $TESTTMP/r1 (glob)
142 searching for changes
142 searching for changes
143 adding changesets
143 adding changesets
144 adding manifests
144 adding manifests
145 adding file changes
145 adding file changes
146 added 10 changesets with 10 changes to 1 files
146 added 10 changesets with 10 changes to 1 files
147
147
148 $ cd ..
148 $ cd ..
149
149
150 $ nlinksdir r2/.hg/store
150 $ nlinksdir r2/.hg/store
151 1 r2/.hg/store/00changelog.i
151 1 r2/.hg/store/00changelog.i
152 1 r2/.hg/store/00manifest.i
152 1 r2/.hg/store/00manifest.i
153 1 r2/.hg/store/data/d1/f2.i
153 1 r2/.hg/store/data/d1/f2.i
154 2 r2/.hg/store/data/f1.i
154 2 r2/.hg/store/data/f1.i
155 1 r2/.hg/store/fncache
155 1 r2/.hg/store/fncache
156
156
157 $ hg -R r2 verify
157 $ hg -R r2 verify
158 checking changesets
158 checking changesets
159 checking manifests
159 checking manifests
160 crosschecking files in changesets and manifests
160 crosschecking files in changesets and manifests
161 checking files
161 checking files
162 2 files, 2 changesets, 2 total revisions
162 2 files, 2 changesets, 2 total revisions
163
163
164
164
165 $ cd r1
165 $ cd r1
166 $ hg up
166 $ hg up
167 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
167 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
168
168
169 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
169 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
170
170
171 $ echo c1c1 >> f1
171 $ echo c1c1 >> f1
172 $ hg ci -m00
172 $ hg ci -m00
173 $ cd ..
173 $ cd ..
174
174
175 $ nlinksdir r2/.hg/store
175 $ nlinksdir r2/.hg/store
176 1 r2/.hg/store/00changelog.i
176 1 r2/.hg/store/00changelog.i
177 1 r2/.hg/store/00manifest.i
177 1 r2/.hg/store/00manifest.i
178 1 r2/.hg/store/data/d1/f2.i
178 1 r2/.hg/store/data/d1/f2.i
179 1 r2/.hg/store/data/f1.i
179 1 r2/.hg/store/data/f1.i
180 1 r2/.hg/store/fncache
180 1 r2/.hg/store/fncache
181
181
182
182
183 $ cd r3
183 $ cd r3
184 $ hg tip --template '{rev}:{node|short}\n'
184 $ hg tip --template '{rev}:{node|short}\n'
185 11:a6451b6bc41f
185 11:a6451b6bc41f
186 $ echo bla > f1
186 $ echo bla > f1
187 $ hg ci -m1
187 $ hg ci -m1
188 $ cd ..
188 $ cd ..
189
189
190 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
190 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
191
191
192 $ linkcp r3 r4
192 $ linkcp r3 r4
193
193
194 r4 has hardlinks in the working dir (not just inside .hg):
194 r4 has hardlinks in the working dir (not just inside .hg):
195
195
   $ nlinksdir r4
   2 r4/.hg/00changelog.i
   2 r4/.hg/branch
   2 r4/.hg/cache/branchheads
-  2 r4/.hg/cache/tags
   2 r4/.hg/dirstate
   2 r4/.hg/hgrc
   2 r4/.hg/last-message.txt
   2 r4/.hg/requires
   2 r4/.hg/store/00changelog.i
   2 r4/.hg/store/00manifest.i
   2 r4/.hg/store/data/d1/f2.d
   2 r4/.hg/store/data/d1/f2.i
   2 r4/.hg/store/data/f1.i
   2 r4/.hg/store/fncache
   2 r4/.hg/store/phaseroots
   2 r4/.hg/store/undo
   2 r4/.hg/store/undo.phaseroots
   2 r4/.hg/undo.bookmarks
   2 r4/.hg/undo.branch
   2 r4/.hg/undo.desc
   2 r4/.hg/undo.dirstate
   2 r4/d1/data1
   2 r4/d1/f2
   2 r4/f1
221
220
222 Update back to revision 11 in r4 should break hardlink of file f1:
221 Update back to revision 11 in r4 should break hardlink of file f1:
223
222
224 $ hg -R r4 up 11
223 $ hg -R r4 up 11
225 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
224 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
226
225
   $ nlinksdir r4
   2 r4/.hg/00changelog.i
   1 r4/.hg/branch
   2 r4/.hg/cache/branchheads
-  2 r4/.hg/cache/tags
   1 r4/.hg/dirstate
   2 r4/.hg/hgrc
   2 r4/.hg/last-message.txt
   2 r4/.hg/requires
   2 r4/.hg/store/00changelog.i
   2 r4/.hg/store/00manifest.i
   2 r4/.hg/store/data/d1/f2.d
   2 r4/.hg/store/data/d1/f2.i
   2 r4/.hg/store/data/f1.i
   2 r4/.hg/store/fncache
   2 r4/.hg/store/phaseroots
   2 r4/.hg/store/undo
   2 r4/.hg/store/undo.phaseroots
   2 r4/.hg/undo.bookmarks
   2 r4/.hg/undo.branch
   2 r4/.hg/undo.desc
   2 r4/.hg/undo.dirstate
   2 r4/d1/data1
   2 r4/d1/f2
   1 r4/f1
252
250
253
251
254 Test hardlinking outside hg:
252 Test hardlinking outside hg:
255
253
256 $ mkdir x
254 $ mkdir x
257 $ echo foo > x/a
255 $ echo foo > x/a
258
256
259 $ linkcp x y
257 $ linkcp x y
260 $ echo bar >> y/a
258 $ echo bar >> y/a
261
259
262 No diff if hardlink:
260 No diff if hardlink:
263
261
264 $ diff x/a y/a
262 $ diff x/a y/a
265
263
266 Test mq hardlinking:
264 Test mq hardlinking:
267
265
268 $ echo "[extensions]" >> $HGRCPATH
266 $ echo "[extensions]" >> $HGRCPATH
269 $ echo "mq=" >> $HGRCPATH
267 $ echo "mq=" >> $HGRCPATH
270
268
271 $ hg init a
269 $ hg init a
272 $ cd a
270 $ cd a
273
271
274 $ hg qimport -n foo - << EOF
272 $ hg qimport -n foo - << EOF
275 > # HG changeset patch
273 > # HG changeset patch
276 > # Date 1 0
274 > # Date 1 0
277 > diff -r 2588a8b53d66 a
275 > diff -r 2588a8b53d66 a
278 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
276 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
279 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
277 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
280 > @@ -0,0 +1,1 @@
278 > @@ -0,0 +1,1 @@
281 > +a
279 > +a
282 > EOF
280 > EOF
283 adding foo to series file
281 adding foo to series file
284
282
285 $ hg qpush
283 $ hg qpush
286 applying foo
284 applying foo
287 now at: foo
285 now at: foo
288
286
289 $ cd ..
287 $ cd ..
290 $ linkcp a b
288 $ linkcp a b
291 $ cd b
289 $ cd b
292
290
293 $ hg qimport -n bar - << EOF
291 $ hg qimport -n bar - << EOF
294 > # HG changeset patch
292 > # HG changeset patch
295 > # Date 2 0
293 > # Date 2 0
296 > diff -r 2588a8b53d66 a
294 > diff -r 2588a8b53d66 a
297 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
295 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
298 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
296 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
299 > @@ -0,0 +1,1 @@
297 > @@ -0,0 +1,1 @@
300 > +b
298 > +b
301 > EOF
299 > EOF
302 adding bar to series file
300 adding bar to series file
303
301
304 $ hg qpush
302 $ hg qpush
305 applying bar
303 applying bar
306 now at: bar
304 now at: bar
307
305
308 $ cat .hg/patches/status
306 $ cat .hg/patches/status
309 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
307 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
310 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
308 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
311
309
312 $ cat .hg/patches/series
310 $ cat .hg/patches/series
313 foo
311 foo
314 bar
312 bar
315
313
316 $ cat ../a/.hg/patches/status
314 $ cat ../a/.hg/patches/status
317 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
315 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
318
316
319 $ cat ../a/.hg/patches/series
317 $ cat ../a/.hg/patches/series
320 foo
318 foo
321
319
322 Test tags hardlinking:
320 Test tags hardlinking:
323
321
324 $ hg qdel -r qbase:qtip
322 $ hg qdel -r qbase:qtip
325 patch foo finalized without changeset message
323 patch foo finalized without changeset message
326 patch bar finalized without changeset message
324 patch bar finalized without changeset message
327
325
328 $ hg tag -l lfoo
326 $ hg tag -l lfoo
329 $ hg tag foo
327 $ hg tag foo
330
328
331 $ cd ..
329 $ cd ..
332 $ linkcp b c
330 $ linkcp b c
333 $ cd c
331 $ cd c
334
332
335 $ hg tag -l -r 0 lbar
333 $ hg tag -l -r 0 lbar
336 $ hg tag -r 0 bar
334 $ hg tag -r 0 bar
337
335
338 $ cat .hgtags
336 $ cat .hgtags
339 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
337 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
340 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
338 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
341
339
342 $ cat .hg/localtags
340 $ cat .hg/localtags
343 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
341 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
344 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
342 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
345
343
346 $ cat ../b/.hgtags
344 $ cat ../b/.hgtags
347 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
345 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
348
346
349 $ cat ../b/.hg/localtags
347 $ cat ../b/.hg/localtags
350 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
348 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
351
349
352 $ cd ..
350 $ cd ..
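For reference, the nlinks.py / nlinksdir helpers defined at the top of this test can be approximated with the standard library alone; the snippet below assumes util.nlinks is essentially the st_nlink field from lstat, which is what the link counts in the expected output represent.

# Standard-library approximation of the test's nlinksdir helper (assumption:
# util.nlinks reports os.lstat(path).st_nlink, i.e. the hardlink count).
import os, sys

def nlinksdir(root):
    paths = []
    for dirpath, _dirs, files in os.walk(root):
        for name in files:
            paths.append(os.path.join(dirpath, name))
    for path in sorted(paths):                      # sorted, like the test
        print(os.lstat(path).st_nlink, path)

if __name__ == '__main__':
    nlinksdir(sys.argv[1] if len(sys.argv) > 1 else '.')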