|
@@
-1,429
+1,440
|
|
1
|
# hg.py - repository classes for mercurial
|
|
1
|
# hg.py - repository classes for mercurial
|
|
2
|
#
|
|
2
|
#
|
|
3
|
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
|
|
3
|
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
|
|
4
|
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
|
|
4
|
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
|
|
5
|
#
|
|
5
|
#
|
|
6
|
# This software may be used and distributed according to the terms of the
|
|
6
|
# This software may be used and distributed according to the terms of the
|
|
7
|
# GNU General Public License version 2 or any later version.
|
|
7
|
# GNU General Public License version 2 or any later version.
|
|
8
|
|
|
8
|
|
|
9
|
from i18n import _
|
|
9
|
from i18n import _
|
|
10
|
from lock import release
|
|
10
|
from lock import release
|
|
11
|
import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
|
|
11
|
import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
|
|
12
|
import lock, util, extensions, error, encoding, node
|
|
12
|
import lock, util, extensions, error, encoding, node
|
|
13
|
import merge as mergemod
|
|
13
|
import merge as mergemod
|
|
14
|
import verify as verifymod
|
|
14
|
import verify as verifymod
|
|
15
|
import errno, os, shutil
|
|
15
|
import errno, os, shutil
|
|
16
|
|
|
16
|
|
|
17
|
def _local(path):
    """Pick the repo module for a local path.

    A path that points at a plain file is assumed to be a bundle and is
    handled by bundlerepo; anything else is a localrepo directory.
    """
    path = util.expandpath(util.drop_scheme('file', path))
    if os.path.isfile(path):
        return bundlerepo
    return localrepo
|
20
|
|
|
20
|
|
|
21
|
def addbranchrevs(lrepo, repo, branches, revs):
    """Resolve requested branch names against a (possibly remote) repo.

    branches is the (hashbranch, branches) pair produced by parseurl():
    hashbranch is the '#fragment' of the URL (or None), branches the
    explicitly requested branch names.

    Returns (revs, checkout): revs extended with the heads of the named
    branches, and the revision to check out (first entry), or
    (None, None) when nothing was requested.
    """
    hashbranch, branches = branches
    if not hashbranch and not branches:
        return revs or None, revs and revs[0] or None
    revs = revs and list(revs) or []
    if not repo.capable('branchmap'):
        # old remote: cannot resolve branch names, but a URL fragment
        # may still name a plain revision
        if branches:
            raise util.Abort(_("remote branch lookup not supported"))
        revs.append(hashbranch)
        return revs, revs[0]
    branchmap = repo.branchmap()

    def primary(butf8):
        # extend revs with the heads of branch butf8 (UTF-8 name);
        # returns True iff the branch exists in the remote branchmap
        if butf8 == '.':
            # '.' means the current branch of the local dirstate
            if not lrepo or not lrepo.local():
                raise util.Abort(_("dirstate branch not accessible"))
            butf8 = lrepo.dirstate.branch()
        if butf8 in branchmap:
            revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
            return True
        else:
            return False

    for branch in branches:
        butf8 = encoding.fromlocal(branch)
        if not primary(butf8):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        butf8 = encoding.fromlocal(hashbranch)
        if not primary(butf8):
            # not a branch name: fall back to treating the URL
            # fragment as a revision identifier
            revs.append(hashbranch)
    return revs, revs[0]
|
42
|
|
|
53
|
|
|
43
|
def parseurl(url, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''

    base, sep, frag = url.partition('#')
    if not sep:
        # no fragment: no branch was named in the URL
        return url, (None, branches or [])
    return base, (frag, branches or [])
|
|
50
|
|
|
61
|
|
|
51
|
# map URL scheme -> repository module, or a callable returning one
# (see _local / _lookup)
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httprepo,
    'https': httprepo,
    'ssh': sshrepo,
    'static-http': statichttprepo,
}
|
|
59
|
|
|
70
|
|
|
60
|
def _lookup(path):
    """Return the repository module for a path or URL.

    The scheme is everything before the first ':'; unknown or missing
    schemes fall back to 'file'.
    """
    scheme = 'file'
    if path:
        c = path.find(':')
        if c > 0:
            scheme = path[:c]
    thing = schemes.get(scheme) or schemes['file']
    try:
        # some entries (e.g. 'file' -> _local) are factories that must
        # be called with the path to choose the module
        return thing(path)
    except TypeError:
        # plain module entry, not callable
        return thing
|
|
71
|
|
|
82
|
|
|
72
|
def islocal(repo):
    '''return true if repo or path is local'''
    if not isinstance(repo, str):
        # already a repository object: ask it directly
        return repo.local()
    try:
        return _lookup(repo).islocal(repo)
    except AttributeError:
        # module has no islocal() -> not a local scheme
        return False
|
|
80
|
|
|
91
|
|
|
81
|
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    repo = _lookup(path).instance(ui, path, create)
    # prefer the repo's own ui (it carries repo-level config)
    ui = getattr(repo, "ui", ui)
    # give every loaded extension a chance to wrap/augment the repo
    for name, module in extensions.extensions():
        hook = getattr(module, 'reposetup', None)
        if hook:
            hook(ui, repo)
    return repo
|
|
90
|
|
|
101
|
|
|
91
|
def defaultdest(source):
    '''return default destination of clone if none is given'''
    # normalize first so trailing slashes don't yield an empty basename
    normalized = os.path.normpath(source)
    return os.path.basename(normalized)
|
|
94
|
|
|
105
|
|
|
95
|
def localpath(path):
    """Strip a file: URL prefix, returning a plain filesystem path.

    Most specific prefix wins; the localhost form keeps its leading
    slash (the prefix is 17 chars but only 16 are dropped).
    """
    if path.startswith('file://localhost/'):
        return path[16:]
    for prefix in ('file://', 'file:'):
        if path.startswith(prefix):
            return path[len(prefix):]
    return path
|
|
103
|
|
|
114
|
|
|
104
|
def share(ui, source, dest=None, update=True):
    '''create a shared repository'''

    if not islocal(source):
        raise util.Abort(_('can only share local repositories'))

    if not dest:
        dest = defaultdest(source)
    else:
        dest = ui.expandpath(dest)

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource)
        srcrepo = repository(ui, source)
        rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
    else:
        # source is already a repository object
        srcrepo = source
        origsource = source = srcrepo.url()
        checkout = None

    sharedpath = srcrepo.sharedpath # if our source is already sharing

    root = os.path.realpath(dest)
    roothg = os.path.join(root, '.hg')

    if os.path.exists(roothg):
        raise util.Abort(_('destination already exists'))

    if not os.path.isdir(root):
        os.mkdir(root)
    os.mkdir(roothg)

    requirements = ''
    try:
        requirements = srcrepo.opener('requires').read()
    except IOError, inst:
        # a missing requires file just means an old-format repo
        if inst.errno != errno.ENOENT:
            raise

    # mark the new repo as shared and record where the store lives
    requirements += 'shared\n'
    file(os.path.join(roothg, 'requires'), 'w').write(requirements)
    file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)

    # propagate the source's default path so pull/push keep working
    default = srcrepo.ui.config('paths', 'default')
    if default:
        f = file(os.path.join(roothg, 'hgrc'), 'w')
        f.write('[paths]\ndefault = %s\n' % default)
        f.close()

    r = repository(ui, root)

    if update:
        r.ui.status(_("updating working directory\n"))
        if update is not True:
            # update names a specific revision
            checkout = update
        # first resolvable of: requested rev, 'default' branch, tip
        for test in (checkout, 'default', 'tip'):
            if test is None:
                continue
            try:
                uprev = r.lookup(test)
                break
            except error.RepoLookupError:
                continue
        _update(r, uprev)
|
|
169
|
|
|
180
|
|
|
170
|
def clone(ui, source, dest=None, pull=False, rev=None, update=True,
          stream=False, branch=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository objects, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branch = parseurl(origsource, branch)
        src_repo = repository(ui, source)
    else:
        src_repo = source
        branch = None
        origsource = source = src_repo.url()
    rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)

    if dest is None:
        dest = defaultdest(source)
        ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    dest = localpath(dest)
    source = localpath(source)

    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_("destination '%s' already exists") % dest)
        elif os.listdir(dest):
            raise util.Abort(_("destination '%s' is not empty") % dest)

    # deletes the partly-created destination on failure; close() is
    # called once the clone has succeeded to disarm it
    class DirCleanup(object):
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
        def close(self):
            self.dir_ = None
        def cleanup(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    src_lock = dest_lock = dir_cleanup = None
    try:
        if islocal(dest):
            dir_cleanup = DirCleanup(dest)

        abspath = origsource
        copy = False
        if src_repo.cancopy() and islocal(dest):
            abspath = os.path.abspath(util.drop_scheme('file', origsource))
            # a plain file copy is only safe for a full clone
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                src_lock = src_repo.lock(wait=False)
            except error.LockError:
                copy = False

        if copy:
            src_repo.hook('preoutgoing', throw=True, source='clone')
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                dir_cleanup.dir_ = hgdir
            try:
                dest_path = hgdir
                os.mkdir(dest_path)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            hardlink = None
            num = 0
            for f in src_repo.store.copylist():
                src = os.path.join(src_repo.sharedpath, f)
                dst = os.path.join(dest_path, f)
                dstbase = os.path.dirname(dst)
                if dstbase and not os.path.exists(dstbase):
                    os.mkdir(dstbase)
                if os.path.exists(src):
                    if dst.endswith('data'):
                        # lock to avoid premature writing to the target
                        dest_lock = lock.lock(os.path.join(dstbase, "lock"))
                    hardlink, n = util.copyfiles(src, dst, hardlink)
                    num += n
            if hardlink:
                ui.debug("linked %d files\n" % num)
            else:
                ui.debug("copied %d files\n" % num)

            # we need to re-init the repo after manually copying the data
            # into it
            dest_repo = repository(ui, dest)
            src_repo.hook('outgoing', source='clone', node='0'*40)
        else:
            try:
                dest_repo = repository(ui, dest, create=True)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            revs = None
            if rev:
                if 'lookup' not in src_repo.capabilities:
                    raise util.Abort(_("src repository does not support "
                                       "revision lookup and so doesn't "
                                       "support clone by revision"))
                revs = [src_repo.lookup(r) for r in rev]
                checkout = revs[0]
            if dest_repo.local():
                dest_repo.clone(src_repo, heads=revs, stream=stream)
            elif src_repo.local():
                src_repo.push(dest_repo, revs=revs)
            else:
                raise util.Abort(_("clone from remote to remote not supported"))

        if dir_cleanup:
            dir_cleanup.close()

        if dest_repo.local():
            # record the source as the new repo's default path
            fp = dest_repo.opener("hgrc", "w", text=True)
            fp.write("[paths]\n")
            fp.write("default = %s\n" % abspath)
            fp.close()

            dest_repo.ui.setconfig('paths', 'default', abspath)

            if update:
                if update is not True:
                    checkout = update
                    if src_repo.local():
                        checkout = src_repo.lookup(update)
                # first resolvable of: requested rev, 'default', tip
                for test in (checkout, 'default', 'tip'):
                    if test is None:
                        continue
                    try:
                        uprev = dest_repo.lookup(test)
                        break
                    except error.RepoLookupError:
                        continue
                bn = dest_repo[uprev].branch()
                dest_repo.ui.status(_("updating to branch %s\n")
                                    % encoding.tolocal(bn))
                _update(dest_repo, uprev)

        return src_repo, dest_repo
    finally:
        release(src_lock, dest_lock)
        if dir_cleanup is not None:
            dir_cleanup.cleanup()
|
|
363
|
|
|
374
|
|
|
364
|
def _showstats(repo, stats):
    # stats is the 4-tuple returned by mergemod.update:
    # (updated, merged, removed, unresolved)
    repo.ui.status(_("%d files updated, %d files merged, "
                     "%d files removed, %d files unresolved\n") % stats)
|
|
367
|
|
|
378
|
|
|
368
|
def update(repo, node):
    """update the working directory to node, merging linear changes"""
    stats = mergemod.update(repo, node, False, False, None)
    _showstats(repo, stats)
    if stats[3]:
        # stats[3] counts unresolved files
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
    return stats[3] > 0
|
|
375
|
|
|
386
|
|
|
376
|
# naming conflict in clone(): clone() shadows the name 'update' with its
# own parameter, so keep a module-level alias for it to call
_update = update
|
|
378
|
|
|
389
|
|
|
379
|
def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes"""
    stats = mergemod.update(repo, node, False, True, None)
    if show_stats:
        _showstats(repo, stats)
    # True when any files were left unresolved
    return stats[3] > 0
|
|
385
|
|
|
396
|
|
|
386
|
def merge(repo, node, force=None, remind=True):
    """branch merge with node, resolving changes"""
    stats = mergemod.update(repo, node, True, force, False)
    _showstats(repo, stats)
    if stats[3]:
        # stats[3] counts unresolved files
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                         "or 'hg update -C' to abandon\n"))
    elif remind:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return stats[3] > 0
|
|
396
|
|
|
407
|
|
|
397
|
def revert(repo, node, choose):
    """revert changes to revision in node without updating dirstate"""
    # index 3 of the merge stats is the unresolved-file count
    return mergemod.update(repo, node, False, True, choose)[3] > 0
|
|
400
|
|
|
411
|
|
|
401
|
def verify(repo):
    """verify the consistency of a repository"""
    return verifymod.verify(repo)
|
|
404
|
|
|
415
|
|
|
405
|
def remoteui(src, opts):
    'build a remote ui from ui or repo and opts'
    if hasattr(src, 'baseui'): # looks like a repository
        dst = src.baseui.copy() # drop repo-specific config
        src = src.ui # copy target options from repo
    else: # assume it's a global ui object
        dst = src.copy() # keep all global options

    # copy ssh-specific options
    for o in 'ssh', 'remotecmd':
        # command-line opts override config
        v = opts.get(o) or src.config('ui', o)
        if v:
            dst.setconfig("ui", o, v)

    # copy bundle-specific options
    r = src.config('bundle', 'mainreporoot')
    if r:
        dst.setconfig('bundle', 'mainreporoot', r)

    # copy auth and http_proxy section settings
    for sect in ('auth', 'http_proxy'):
        for key, val in src.configitems(sect):
            dst.setconfig(sect, key, val)

    return dst
|