url: use open and not url.open for local files (issue3624)
Siddharth Agarwal
r17887:0e2846b2 stable

The requested changes are too big and content was truncated.
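Context for the change: `hg synthesize` reads the JSON model produced by `hg analyze`, and previously opened it with `url.open`, which applies URL semantics and mishandles plain local paths (issue3624). The commit adds an `hg.openpath` helper that opens local files with the builtin `open` (after stripping any `file://` prefix via `util.urllocalpath`) and defers to `url.open` only for remote URLs, then switches synthrepo over to it. A hedged usage sketch of the new helper, assuming a stock `ui` object and a hypothetical local `model.json`:

```python
from mercurial import ui as uimod, hg

u = uimod.ui()

# Local path: hg.islocal() is true, so this is a plain file object from
# the builtin open() -- the issue3624 fix.
fp = hg.openpath(u, 'model.json')

# file:// URL: util.urllocalpath() strips the scheme before open().
fp = hg.openpath(u, 'file:///tmp/model.json')

# Remote URL: falls through to url.open(), which knows about auth,
# proxies, cacerts and the rest of the URL-only machinery.
fp = hg.openpath(u, 'https://example.com/model.json')
```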

@@ -1,377 +1,377 b''
1 1 # synthrepo.py - repo synthesis
2 2 #
3 3 # Copyright 2012 Facebook
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''synthesize structurally interesting change history
9 9
10 10 This extension is useful for creating a repository with properties
11 11 that are statistically similar to an existing repository. During
12 12 analysis, a simple probability table is constructed from the history
13 13 of an existing repository. During synthesis, these properties are
14 14 reconstructed.
15 15
16 16 Properties that are analyzed and synthesized include the following:
17 17
18 18 - Lines added or removed when an existing file is modified
19 19 - Number and sizes of files added
20 20 - Number of files removed
21 21 - Line lengths
22 22 - Topological distance to parent changeset(s)
23 23 - Probability of a commit being a merge
24 24 - Probability of a newly added file being added to a new directory
25 25 - Interarrival time, and time zone, of commits
26 26
27 27 A few obvious properties that are not currently handled realistically:
28 28
29 29 - Merges are treated as regular commits with two parents, which is not
30 30 realistic
31 31 - Modifications are not treated as operations on hunks of lines, but
32 32 as insertions and deletions of randomly chosen single lines
33 33 - Committer ID (always random)
34 34 - Executability of files
35 35 - Symlinks and binary files are ignored
36 36 '''
37 37
38 38 import bisect, collections, json, os, random, time
39 from mercurial import cmdutil, context, patch, scmutil, url, util
39 from mercurial import cmdutil, context, patch, scmutil, url, util, hg
40 40 from mercurial.i18n import _
41 41 from mercurial.node import nullrev, nullid
42 42
43 43 testedwith = 'internal'
44 44
45 45 cmdtable = {}
46 46 command = cmdutil.command(cmdtable)
47 47
48 48 newfile = set(('new fi', 'rename', 'copy f', 'copy t'))
49 49
50 50 def zerodict():
51 51 return collections.defaultdict(lambda: 0)
52 52
53 53 def roundto(x, k):
54 54 if x > k * 2:
55 55 return int(round(x / float(k)) * k)
56 56 return int(round(x))
57 57
58 58 def parsegitdiff(lines):
59 59 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
60 60 binary = False
61 61 for line in lines:
62 62 start = line[:6]
63 63 if start == 'diff -':
64 64 if filename:
65 65 yield filename, mar, lineadd, lineremove, binary
66 66 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
67 67 filename = patch.gitre.match(line).group(1)
68 68 elif start in newfile:
69 69 mar = 'a'
70 70 elif start == 'GIT bi':
71 71 binary = True
72 72 elif start == 'delete':
73 73 mar = 'r'
74 74 elif start:
75 75 s = start[0]
76 76 if s == '-' and not line.startswith('--- '):
77 77 lineremove += 1
78 78 elif s == '+' and not line.startswith('+++ '):
79 79 lineadd[roundto(len(line) - 1, 5)] += 1
80 80 if filename:
81 81 yield filename, mar, lineadd, lineremove, binary
82 82
83 83 @command('analyze',
84 84 [('o', 'output', [], _('write output to given file'), _('FILE')),
85 85 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
86 86 _('hg analyze'))
87 87 def analyze(ui, repo, *revs, **opts):
88 88 '''create a simple model of a repository to use for later synthesis
89 89
90 90 This command examines every changeset in the given range (or all
91 91 of history if none are specified) and creates a simple statistical
92 92 model of the history of the repository.
93 93
94 94 The model is written out to a JSON file, and can be used by
95 95 :hg:`synthesize` to create or augment a repository with synthetic
96 96 commits that have a structure that is statistically similar to the
97 97 analyzed repository.
98 98 '''
99 99
100 100 revs = list(revs)
101 101 revs.extend(opts['rev'])
102 102 if not revs:
103 103 revs = [':']
104 104
105 105 output = opts['output']
106 106 if not output:
107 107 output = os.path.basename(repo.root) + '.json'
108 108
109 109 if output == '-':
110 110 fp = sys.stdout
111 111 else:
112 112 fp = open(output, 'w')
113 113
114 114 revs = scmutil.revrange(repo, revs)
115 115 revs.sort()
116 116
117 117 lineschanged = zerodict()
118 118 children = zerodict()
119 119 p1distance = zerodict()
120 120 p2distance = zerodict()
121 121 linesinfilesadded = zerodict()
122 122 fileschanged = zerodict()
123 123 filesadded = zerodict()
124 124 filesremoved = zerodict()
125 125 linelengths = zerodict()
126 126 interarrival = zerodict()
127 127 parents = zerodict()
128 128 dirsadded = zerodict()
129 129 tzoffset = zerodict()
130 130
131 131 progress = ui.progress
132 132 _analyzing = _('analyzing')
133 133 _changesets = _('changesets')
134 134 _total = len(revs)
135 135
136 136 for i, rev in enumerate(revs):
137 137 progress(_analyzing, i, unit=_changesets, total=_total)
138 138 ctx = repo[rev]
139 139 pl = ctx.parents()
140 140 pctx = pl[0]
141 141 prev = pctx.rev()
142 142 children[prev] += 1
143 143 p1distance[rev - prev] += 1
144 144 parents[len(pl)] += 1
145 145 tzoffset[ctx.date()[1]] += 1
146 146 if len(pl) > 1:
147 147 p2distance[rev - pl[1].rev()] += 1
148 148 if prev == rev - 1:
149 149 lastctx = pctx
150 150 else:
151 151 lastctx = repo[rev - 1]
152 152 if lastctx.rev() != nullrev:
153 153 interarrival[roundto(ctx.date()[0] - lastctx.date()[0], 300)] += 1
154 154 diff = sum((d.splitlines()
155 155 for d in ctx.diff(pctx, opts=dict(git=True))), [])
156 156 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
157 157 for filename, mar, lineadd, lineremove, binary in parsegitdiff(diff):
158 158 if binary:
159 159 continue
160 160 added = sum(lineadd.itervalues(), 0)
161 161 if mar == 'm':
162 162 if added and lineremove:
163 163 lineschanged[roundto(added, 5), roundto(lineremove, 5)] += 1
164 164 filechanges += 1
165 165 elif mar == 'a':
166 166 fileadds += 1
167 167 if '/' in filename:
168 168 filedir = filename.rsplit('/', 1)[0]
169 169 if filedir not in pctx.dirs():
170 170 diradds += 1
171 171 linesinfilesadded[roundto(added, 5)] += 1
172 172 elif mar == 'r':
173 173 fileremoves += 1
174 174 for length, count in lineadd.iteritems():
175 175 linelengths[length] += count
176 176 fileschanged[filechanges] += 1
177 177 filesadded[fileadds] += 1
178 178 dirsadded[diradds] += 1
179 179 filesremoved[fileremoves] += 1
180 180
181 181 invchildren = zerodict()
182 182
183 183 for rev, count in children.iteritems():
184 184 invchildren[count] += 1
185 185
186 186 if output != '-':
187 187 ui.status(_('writing output to %s\n') % output)
188 188
189 189 def pronk(d):
190 190 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
191 191
192 192 json.dump(dict(revs=len(revs),
193 193 lineschanged=pronk(lineschanged),
194 194 children=pronk(invchildren),
195 195 fileschanged=pronk(fileschanged),
196 196 filesadded=pronk(filesadded),
197 197 linesinfilesadded=pronk(linesinfilesadded),
198 198 dirsadded=pronk(dirsadded),
199 199 filesremoved=pronk(filesremoved),
200 200 linelengths=pronk(linelengths),
201 201 parents=pronk(parents),
202 202 p1distance=pronk(p1distance),
203 203 p2distance=pronk(p2distance),
204 204 interarrival=pronk(interarrival),
205 205 tzoffset=pronk(tzoffset),
206 206 ),
207 207 fp)
208 208 fp.close()
209 209
210 210 @command('synthesize',
211 211 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
212 212 ('', 'dict', '', _('path to a dictionary of words'), _('FILE'))],
213 213 _('hg synthesize [OPTION].. DESCFILE'))
214 214 def synthesize(ui, repo, descpath, **opts):
215 215 '''synthesize commits based on a model of an existing repository
216 216
217 217 The model must have been generated by :hg:`analyze`. Commits will
218 218 be generated randomly according to the probabilities described in
219 219 the model.
220 220
221 221 When synthesizing new content, commit descriptions, and user
222 222 names, words will be chosen randomly from a dictionary that is
223 223 presumed to contain one word per line. Use --dict to specify the
224 224 path to an alternate dictionary to use.
225 225 '''
226 226 try:
227 fp = url.open(ui, descpath)
227 fp = hg.openpath(ui, descpath)
228 228 except Exception, err:
229 229 raise util.Abort('%s: %s' % (descpath, err[0].strerror))
230 230 desc = json.load(fp)
231 231 fp.close()
232 232
233 233 def cdf(l):
234 234 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
235 235 t = float(sum(probs, 0))
236 236 s, cdfs = 0, []
237 237 for v in probs:
238 238 s += v
239 239 cdfs.append(s / t)
240 240 return vals, cdfs
241 241
242 242 lineschanged = cdf(desc['lineschanged'])
243 243 fileschanged = cdf(desc['fileschanged'])
244 244 filesadded = cdf(desc['filesadded'])
245 245 dirsadded = cdf(desc['dirsadded'])
246 246 filesremoved = cdf(desc['filesremoved'])
247 247 linelengths = cdf(desc['linelengths'])
248 248 parents = cdf(desc['parents'])
249 249 p1distance = cdf(desc['p1distance'])
250 250 p2distance = cdf(desc['p2distance'])
251 251 interarrival = cdf(desc['interarrival'])
252 252 linesinfilesadded = cdf(desc['linesinfilesadded'])
253 253 tzoffset = cdf(desc['tzoffset'])
254 254
255 255 dictfile = opts.get('dict') or '/usr/share/dict/words'
256 256 try:
257 257 fp = open(dictfile, 'rU')
258 258 except IOError, err:
259 259 raise util.Abort('%s: %s' % (dictfile, err.strerror))
260 260 words = fp.read().splitlines()
261 261 fp.close()
262 262
263 263 def pick(cdf):
264 264 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
265 265
266 266 def makeline(minimum=0):
267 267 total = max(minimum, pick(linelengths))
268 268 c, l = 0, []
269 269 while c < total:
270 270 w = random.choice(words)
271 271 c += len(w) + 1
272 272 l.append(w)
273 273 return ' '.join(l)
274 274
275 275 wlock = repo.wlock()
276 276 lock = repo.lock()
277 277
278 278 nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))
279 279
280 280 progress = ui.progress
281 281 _synthesizing = _('synthesizing')
282 282 _changesets = _('changesets')
283 283
284 284 count = int(opts['count'])
285 285 heads = set(map(repo.changelog.rev, repo.heads()))
286 286 for i in xrange(count):
287 287 progress(_synthesizing, i, unit=_changesets, total=count)
288 288
289 289 node = repo.changelog.node
290 290 revs = len(repo)
291 291
292 292 def pickhead(heads, distance):
293 293 if heads:
294 294 lheads = sorted(heads)
295 295 rev = revs - min(pick(distance), revs)
296 296 if rev < lheads[-1]:
297 297 rev = lheads[bisect.bisect_left(lheads, rev)]
298 298 else:
299 299 rev = lheads[-1]
300 300 return rev, node(rev)
301 301 return nullrev, nullid
302 302
303 303 r1 = revs - min(pick(p1distance), revs)
304 304 p1 = node(r1)
305 305
306 306 # the number of heads will grow without bound if we use a pure
307 307 # model, so artificially constrain their proliferation
308 308 if pick(parents) == 2 or len(heads) > random.randint(1, 20):
309 309 r2, p2 = pickhead(heads.difference([r1]), p2distance)
310 310 else:
311 311 r2, p2 = nullrev, nullid
312 312
313 313 pl = [p1, p2]
314 314 pctx = repo[r1]
315 315 mf = pctx.manifest()
316 316 mfk = mf.keys()
317 317 changes = {}
318 318 if mfk:
319 319 for __ in xrange(pick(fileschanged)):
320 320 for __ in xrange(10):
321 321 fctx = pctx.filectx(random.choice(mfk))
322 322 path = fctx.path()
323 323 if not (path in nevertouch or fctx.isbinary() or
324 324 'l' in fctx.flags()):
325 325 break
326 326 lines = fctx.data().splitlines()
327 327 add, remove = pick(lineschanged)
328 328 for __ in xrange(remove):
329 329 if not lines:
330 330 break
331 331 del lines[random.randrange(0, len(lines))]
332 332 for __ in xrange(add):
333 333 lines.insert(random.randint(0, len(lines)), makeline())
334 334 path = fctx.path()
335 335 changes[path] = context.memfilectx(path,
336 336 '\n'.join(lines) + '\n')
337 337 for __ in xrange(pick(filesremoved)):
338 338 path = random.choice(mfk)
339 339 for __ in xrange(10):
340 340 path = random.choice(mfk)
341 341 if path not in changes:
342 342 changes[path] = None
343 343 break
344 344 if filesadded:
345 345 dirs = list(pctx.dirs())
346 346 dirs.append('')
347 347 for __ in xrange(pick(filesadded)):
348 348 path = [random.choice(dirs)]
349 349 if pick(dirsadded):
350 350 path.append(random.choice(words))
351 351 path.append(random.choice(words))
352 352 path = '/'.join(filter(None, path))
353 353 data = '\n'.join(makeline()
354 354 for __ in xrange(pick(linesinfilesadded))) + '\n'
355 355 changes[path] = context.memfilectx(path, data)
356 356 def filectxfn(repo, memctx, path):
357 357 data = changes[path]
358 358 if data is None:
359 359 raise IOError
360 360 return data
361 361 if not changes:
362 362 continue
363 363 if revs:
364 364 date = repo['tip'].date()[0] + pick(interarrival)
365 365 else:
366 366 date = time.time() - (86400 * count)
367 367 user = random.choice(words) + '@' + random.choice(words)
368 368 mc = context.memctx(repo, pl, makeline(minimum=2),
369 369 sorted(changes.iterkeys()),
370 370 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
371 371 newnode = mc.commit()
372 372 heads.add(repo.changelog.rev(newnode))
373 373 heads.discard(r1)
374 374 heads.discard(r2)
375 375
376 376 lock.release()
377 377 wlock.release()
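A detail worth noting in the synthesis code above: `cdf()` turns each `(value, count)` table from the model into a cumulative distribution, and `pick()` samples from it by bisecting with a uniform random number, so values are drawn in proportion to their observed counts. A self-contained sketch of the same technique (standalone names, not part of the extension):

```python
import bisect, random

def makecdf(pairs):
    # pairs is [(value, count), ...], shaped like the model tables that
    # analyze writes out (its "pronk"-sorted lists).
    vals, counts = zip(*sorted(pairs, key=lambda p: p[1], reverse=True))
    total = float(sum(counts))
    running, cdf = 0, []
    for c in counts:
        running += c
        cdf.append(running / total)  # cumulative probability, ends at 1.0
    return vals, cdf

def pick(model):
    vals, cdf = model
    # random.random() is uniform on [0, 1); bisect_left finds the first
    # bucket whose cumulative probability covers it, so each value comes
    # up with probability proportional to its count.
    return vals[bisect.bisect_left(cdf, random.random())]

# pick(makecdf([('merge', 1), ('plain', 9)])) yields 'plain' ~90% of runs
```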
NO CONTENT: modified file (diff too big; content truncated)
NO CONTENT: modified file (diff too big; content truncated)
@@ -1,627 +1,634 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from i18n import _
10 10 from lock import release
11 11 from node import hex, nullid
12 12 import localrepo, bundlerepo, httppeer, sshpeer, statichttprepo, bookmarks
13 import lock, util, extensions, error, node, scmutil, phases
13 import lock, util, extensions, error, node, scmutil, phases, url
14 14 import cmdutil, discovery
15 15 import merge as mergemod
16 16 import verify as verifymod
17 17 import errno, os, shutil
18 18
19 19 def _local(path):
20 20 path = util.expandpath(util.urllocalpath(path))
21 21 return (os.path.isfile(path) and bundlerepo or localrepo)
22 22
23 23 def addbranchrevs(lrepo, other, branches, revs):
24 24 peer = other.peer() # a courtesy to callers using a localrepo for other
25 25 hashbranch, branches = branches
26 26 if not hashbranch and not branches:
27 27 return revs or None, revs and revs[0] or None
28 28 revs = revs and list(revs) or []
29 29 if not peer.capable('branchmap'):
30 30 if branches:
31 31 raise util.Abort(_("remote branch lookup not supported"))
32 32 revs.append(hashbranch)
33 33 return revs, revs[0]
34 34 branchmap = peer.branchmap()
35 35
36 36 def primary(branch):
37 37 if branch == '.':
38 38 if not lrepo:
39 39 raise util.Abort(_("dirstate branch not accessible"))
40 40 branch = lrepo.dirstate.branch()
41 41 if branch in branchmap:
42 42 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
43 43 return True
44 44 else:
45 45 return False
46 46
47 47 for branch in branches:
48 48 if not primary(branch):
49 49 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
50 50 if hashbranch:
51 51 if not primary(hashbranch):
52 52 revs.append(hashbranch)
53 53 return revs, revs[0]
54 54
55 55 def parseurl(path, branches=None):
56 56 '''parse url#branch, returning (url, (branch, branches))'''
57 57
58 58 u = util.url(path)
59 59 branch = None
60 60 if u.fragment:
61 61 branch = u.fragment
62 62 u.fragment = None
63 63 return str(u), (branch, branches or [])
64 64
65 65 schemes = {
66 66 'bundle': bundlerepo,
67 67 'file': _local,
68 68 'http': httppeer,
69 69 'https': httppeer,
70 70 'ssh': sshpeer,
71 71 'static-http': statichttprepo,
72 72 }
73 73
74 74 def _peerlookup(path):
75 75 u = util.url(path)
76 76 scheme = u.scheme or 'file'
77 77 thing = schemes.get(scheme) or schemes['file']
78 78 try:
79 79 return thing(path)
80 80 except TypeError:
81 81 return thing
82 82
83 83 def islocal(repo):
84 84 '''return true if repo or path is local'''
85 85 if isinstance(repo, str):
86 86 try:
87 87 return _peerlookup(repo).islocal(repo)
88 88 except AttributeError:
89 89 return False
90 90 return repo.local()
91 91
92 def openpath(ui, path):
93 '''open path with open if local, url.open if remote'''
94 if islocal(path):
95 return open(util.urllocalpath(path))
96 else:
97 return url.open(ui, path)
98
92 99 def _peerorrepo(ui, path, create=False):
93 100 """return a repository object for the specified path"""
94 101 obj = _peerlookup(path).instance(ui, path, create)
95 102 ui = getattr(obj, "ui", ui)
96 103 for name, module in extensions.extensions():
97 104 hook = getattr(module, 'reposetup', None)
98 105 if hook:
99 106 hook(ui, obj)
100 107 return obj
101 108
102 109 def repository(ui, path='', create=False):
103 110 """return a repository object for the specified path"""
104 111 peer = _peerorrepo(ui, path, create)
105 112 repo = peer.local()
106 113 if not repo:
107 114 raise util.Abort(_("repository '%s' is not local") %
108 115 (path or peer.url()))
109 116 return repo
110 117
111 118 def peer(uiorrepo, opts, path, create=False):
112 119 '''return a repository peer for the specified path'''
113 120 rui = remoteui(uiorrepo, opts)
114 121 return _peerorrepo(rui, path, create).peer()
115 122
116 123 def defaultdest(source):
117 124 '''return default destination of clone if none is given'''
118 125 return os.path.basename(os.path.normpath(util.url(source).path))
119 126
120 127 def share(ui, source, dest=None, update=True):
121 128 '''create a shared repository'''
122 129
123 130 if not islocal(source):
124 131 raise util.Abort(_('can only share local repositories'))
125 132
126 133 if not dest:
127 134 dest = defaultdest(source)
128 135 else:
129 136 dest = ui.expandpath(dest)
130 137
131 138 if isinstance(source, str):
132 139 origsource = ui.expandpath(source)
133 140 source, branches = parseurl(origsource)
134 141 srcrepo = repository(ui, source)
135 142 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
136 143 else:
137 144 srcrepo = source.local()
138 145 origsource = source = srcrepo.url()
139 146 checkout = None
140 147
141 148 sharedpath = srcrepo.sharedpath # if our source is already sharing
142 149
143 150 root = os.path.realpath(dest)
144 151 roothg = os.path.join(root, '.hg')
145 152
146 153 if os.path.exists(roothg):
147 154 raise util.Abort(_('destination already exists'))
148 155
149 156 if not os.path.isdir(root):
150 157 os.mkdir(root)
151 158 util.makedir(roothg, notindexed=True)
152 159
153 160 requirements = ''
154 161 try:
155 162 requirements = srcrepo.opener.read('requires')
156 163 except IOError, inst:
157 164 if inst.errno != errno.ENOENT:
158 165 raise
159 166
160 167 requirements += 'shared\n'
161 168 util.writefile(os.path.join(roothg, 'requires'), requirements)
162 169 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
163 170
164 171 r = repository(ui, root)
165 172
166 173 default = srcrepo.ui.config('paths', 'default')
167 174 if default:
168 175 fp = r.opener("hgrc", "w", text=True)
169 176 fp.write("[paths]\n")
170 177 fp.write("default = %s\n" % default)
171 178 fp.close()
172 179
173 180 if update:
174 181 r.ui.status(_("updating working directory\n"))
175 182 if update is not True:
176 183 checkout = update
177 184 for test in (checkout, 'default', 'tip'):
178 185 if test is None:
179 186 continue
180 187 try:
181 188 uprev = r.lookup(test)
182 189 break
183 190 except error.RepoLookupError:
184 191 continue
185 192 _update(r, uprev)
186 193
187 194 def copystore(ui, srcrepo, destpath):
188 195 '''copy files from store of srcrepo in destpath
189 196
190 197 returns destlock
191 198 '''
192 199 destlock = None
193 200 try:
194 201 hardlink = None
195 202 num = 0
196 203 srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
197 204 for f in srcrepo.store.copylist():
198 205 if srcpublishing and f.endswith('phaseroots'):
199 206 continue
200 207 src = os.path.join(srcrepo.sharedpath, f)
201 208 dst = os.path.join(destpath, f)
202 209 dstbase = os.path.dirname(dst)
203 210 if dstbase and not os.path.exists(dstbase):
204 211 os.mkdir(dstbase)
205 212 if os.path.exists(src):
206 213 if dst.endswith('data'):
207 214 # lock to avoid premature writing to the target
208 215 destlock = lock.lock(os.path.join(dstbase, "lock"))
209 216 hardlink, n = util.copyfiles(src, dst, hardlink)
210 217 num += n
211 218 if hardlink:
212 219 ui.debug("linked %d files\n" % num)
213 220 else:
214 221 ui.debug("copied %d files\n" % num)
215 222 return destlock
216 223 except: # re-raises
217 224 release(destlock)
218 225 raise
219 226
220 227 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
221 228 update=True, stream=False, branch=None):
222 229 """Make a copy of an existing repository.
223 230
224 231 Create a copy of an existing repository in a new directory. The
225 232 source and destination are URLs, as passed to the repository
226 233 function. Returns a pair of repository peers, the source and
227 234 newly created destination.
228 235
229 236 The location of the source is added to the new repository's
230 237 .hg/hgrc file, as the default to be used for future pulls and
231 238 pushes.
232 239
233 240 If an exception is raised, the partly cloned/updated destination
234 241 repository will be deleted.
235 242
236 243 Arguments:
237 244
238 245 source: repository object or URL
239 246
240 247 dest: URL of destination repository to create (defaults to base
241 248 name of source repository)
242 249
243 250 pull: always pull from source repository, even in local case
244 251
245 252 stream: stream raw data uncompressed from repository (fast over
246 253 LAN, slow over WAN)
247 254
248 255 rev: revision to clone up to (implies pull=True)
249 256
250 257 update: update working directory after clone completes, if
251 258 destination is local repository (True means update to default rev,
252 259 anything else is treated as a revision)
253 260
254 261 branch: branches to clone
255 262 """
256 263
257 264 if isinstance(source, str):
258 265 origsource = ui.expandpath(source)
259 266 source, branch = parseurl(origsource, branch)
260 267 srcpeer = peer(ui, peeropts, source)
261 268 else:
262 269 srcpeer = source.peer() # in case we were called with a localrepo
263 270 branch = (None, branch or [])
264 271 origsource = source = srcpeer.url()
265 272 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
266 273
267 274 if dest is None:
268 275 dest = defaultdest(source)
269 276 ui.status(_("destination directory: %s\n") % dest)
270 277 else:
271 278 dest = ui.expandpath(dest)
272 279
273 280 dest = util.urllocalpath(dest)
274 281 source = util.urllocalpath(source)
275 282
276 283 if not dest:
277 284 raise util.Abort(_("empty destination path is not valid"))
278 285 if os.path.exists(dest):
279 286 if not os.path.isdir(dest):
280 287 raise util.Abort(_("destination '%s' already exists") % dest)
281 288 elif os.listdir(dest):
282 289 raise util.Abort(_("destination '%s' is not empty") % dest)
283 290
284 291 class DirCleanup(object):
285 292 def __init__(self, dir_):
286 293 self.rmtree = shutil.rmtree
287 294 self.dir_ = dir_
288 295 def close(self):
289 296 self.dir_ = None
290 297 def cleanup(self):
291 298 if self.dir_:
292 299 self.rmtree(self.dir_, True)
293 300
294 301 srclock = destlock = dircleanup = None
295 302 srcrepo = srcpeer.local()
296 303 try:
297 304 abspath = origsource
298 305 if islocal(origsource):
299 306 abspath = os.path.abspath(util.urllocalpath(origsource))
300 307
301 308 if islocal(dest):
302 309 dircleanup = DirCleanup(dest)
303 310
304 311 copy = False
305 312 if (srcrepo and srcrepo.cancopy() and islocal(dest)
306 313 and not phases.hassecret(srcrepo)):
307 314 copy = not pull and not rev
308 315
309 316 if copy:
310 317 try:
311 318 # we use a lock here because if we race with commit, we
312 319 # can end up with extra data in the cloned revlogs that's
313 320 # not pointed to by changesets, thus causing verify to
314 321 # fail
315 322 srclock = srcrepo.lock(wait=False)
316 323 except error.LockError:
317 324 copy = False
318 325
319 326 if copy:
320 327 srcrepo.hook('preoutgoing', throw=True, source='clone')
321 328 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
322 329 if not os.path.exists(dest):
323 330 os.mkdir(dest)
324 331 else:
325 332 # only clean up directories we create ourselves
326 333 dircleanup.dir_ = hgdir
327 334 try:
328 335 destpath = hgdir
329 336 util.makedir(destpath, notindexed=True)
330 337 except OSError, inst:
331 338 if inst.errno == errno.EEXIST:
332 339 dircleanup.close()
333 340 raise util.Abort(_("destination '%s' already exists")
334 341 % dest)
335 342 raise
336 343
337 344 destlock = copystore(ui, srcrepo, destpath)
338 345
339 346 # Recomputing branch cache might be slow on big repos,
340 347 # so just copy it
341 348 dstcachedir = os.path.join(destpath, 'cache')
342 349 srcbranchcache = srcrepo.sjoin('cache/branchheads')
343 350 dstbranchcache = os.path.join(dstcachedir, 'branchheads')
344 351 if os.path.exists(srcbranchcache):
345 352 if not os.path.exists(dstcachedir):
346 353 os.mkdir(dstcachedir)
347 354 util.copyfile(srcbranchcache, dstbranchcache)
348 355
349 356 # we need to re-init the repo after manually copying the data
350 357 # into it
351 358 destpeer = peer(srcrepo, peeropts, dest)
352 359 srcrepo.hook('outgoing', source='clone',
353 360 node=node.hex(node.nullid))
354 361 else:
355 362 try:
356 363 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
357 364 # only pass ui when no srcrepo
358 365 except OSError, inst:
359 366 if inst.errno == errno.EEXIST:
360 367 dircleanup.close()
361 368 raise util.Abort(_("destination '%s' already exists")
362 369 % dest)
363 370 raise
364 371
365 372 revs = None
366 373 if rev:
367 374 if not srcpeer.capable('lookup'):
368 375 raise util.Abort(_("src repository does not support "
369 376 "revision lookup and so doesn't "
370 377 "support clone by revision"))
371 378 revs = [srcpeer.lookup(r) for r in rev]
372 379 checkout = revs[0]
373 380 if destpeer.local():
374 381 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
375 382 elif srcrepo:
376 383 srcrepo.push(destpeer, revs=revs)
377 384 else:
378 385 raise util.Abort(_("clone from remote to remote not supported"))
379 386
380 387 if dircleanup:
381 388 dircleanup.close()
382 389
383 390 # clone all bookmarks except divergent ones
384 391 destrepo = destpeer.local()
385 392 if destrepo and srcpeer.capable("pushkey"):
386 393 rb = srcpeer.listkeys('bookmarks')
387 394 for k, n in rb.iteritems():
388 395 try:
389 396 m = destrepo.lookup(n)
390 397 destrepo._bookmarks[k] = m
391 398 except error.RepoLookupError:
392 399 pass
393 400 if rb:
394 401 bookmarks.write(destrepo)
395 402 elif srcrepo and destpeer.capable("pushkey"):
396 403 for k, n in srcrepo._bookmarks.iteritems():
397 404 destpeer.pushkey('bookmarks', k, '', hex(n))
398 405
399 406 if destrepo:
400 407 fp = destrepo.opener("hgrc", "w", text=True)
401 408 fp.write("[paths]\n")
402 409 u = util.url(abspath)
403 410 u.passwd = None
404 411 defaulturl = str(u)
405 412 fp.write("default = %s\n" % defaulturl)
406 413 fp.close()
407 414
408 415 destrepo.ui.setconfig('paths', 'default', defaulturl)
409 416
410 417 if update:
411 418 if update is not True:
412 419 checkout = srcpeer.lookup(update)
413 420 uprev = None
414 421 status = None
415 422 if checkout is not None:
416 423 try:
417 424 uprev = destrepo.lookup(checkout)
418 425 except error.RepoLookupError:
419 426 pass
420 427 if uprev is None:
421 428 try:
422 429 uprev = destrepo._bookmarks['@']
423 430 update = '@'
424 431 bn = destrepo[uprev].branch()
425 432 if bn == 'default':
426 433 status = _("updating to bookmark @\n")
427 434 else:
428 435 status = _("updating to bookmark @ on branch %s\n"
429 436 % bn)
430 437 except KeyError:
431 438 try:
432 439 uprev = destrepo.branchtip('default')
433 440 except error.RepoLookupError:
434 441 uprev = destrepo.lookup('tip')
435 442 if not status:
436 443 bn = destrepo[uprev].branch()
437 444 status = _("updating to branch %s\n") % bn
438 445 destrepo.ui.status(status)
439 446 _update(destrepo, uprev)
440 447 if update in destrepo._bookmarks:
441 448 bookmarks.setcurrent(destrepo, update)
442 449
443 450 return srcpeer, destpeer
444 451 finally:
445 452 release(srclock, destlock)
446 453 if dircleanup is not None:
447 454 dircleanup.cleanup()
448 455 if srcpeer is not None:
449 456 srcpeer.close()
450 457
451 458 def _showstats(repo, stats):
452 459 repo.ui.status(_("%d files updated, %d files merged, "
453 460 "%d files removed, %d files unresolved\n") % stats)
454 461
455 462 def update(repo, node):
456 463 """update the working directory to node, merging linear changes"""
457 464 stats = mergemod.update(repo, node, False, False, None)
458 465 _showstats(repo, stats)
459 466 if stats[3]:
460 467 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
461 468 return stats[3] > 0
462 469
463 470 # naming conflict in clone()
464 471 _update = update
465 472
466 473 def clean(repo, node, show_stats=True):
467 474 """forcibly switch the working directory to node, clobbering changes"""
468 475 stats = mergemod.update(repo, node, False, True, None)
469 476 if show_stats:
470 477 _showstats(repo, stats)
471 478 return stats[3] > 0
472 479
473 480 def merge(repo, node, force=None, remind=True):
474 481 """Branch merge with node, resolving changes. Return true if any
475 482 unresolved conflicts."""
476 483 stats = mergemod.update(repo, node, True, force, False)
477 484 _showstats(repo, stats)
478 485 if stats[3]:
479 486 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
480 487 "or 'hg update -C .' to abandon\n"))
481 488 elif remind:
482 489 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
483 490 return stats[3] > 0
484 491
485 492 def _incoming(displaychlist, subreporecurse, ui, repo, source,
486 493 opts, buffered=False):
487 494 """
488 495 Helper for incoming / gincoming.
489 496 displaychlist gets called with
490 497 (remoterepo, incomingchangesetlist, displayer) parameters,
491 498 and is supposed to contain only code that can't be unified.
492 499 """
493 500 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
494 501 other = peer(repo, opts, source)
495 502 ui.status(_('comparing with %s\n') % util.hidepassword(source))
496 503 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
497 504
498 505 if revs:
499 506 revs = [other.lookup(rev) for rev in revs]
500 507 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
501 508 revs, opts["bundle"], opts["force"])
502 509 try:
503 510 if not chlist:
504 511 ui.status(_("no changes found\n"))
505 512 return subreporecurse()
506 513
507 514 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
508 515
509 516 # XXX once graphlog extension makes it into core,
510 517 # should be replaced by a if graph/else
511 518 displaychlist(other, chlist, displayer)
512 519
513 520 displayer.close()
514 521 finally:
515 522 cleanupfn()
516 523 subreporecurse()
517 524 return 0 # exit code is zero since we found incoming changes
518 525
519 526 def incoming(ui, repo, source, opts):
520 527 def subreporecurse():
521 528 ret = 1
522 529 if opts.get('subrepos'):
523 530 ctx = repo[None]
524 531 for subpath in sorted(ctx.substate):
525 532 sub = ctx.sub(subpath)
526 533 ret = min(ret, sub.incoming(ui, source, opts))
527 534 return ret
528 535
529 536 def display(other, chlist, displayer):
530 537 limit = cmdutil.loglimit(opts)
531 538 if opts.get('newest_first'):
532 539 chlist.reverse()
533 540 count = 0
534 541 for n in chlist:
535 542 if limit is not None and count >= limit:
536 543 break
537 544 parents = [p for p in other.changelog.parents(n) if p != nullid]
538 545 if opts.get('no_merges') and len(parents) == 2:
539 546 continue
540 547 count += 1
541 548 displayer.show(other[n])
542 549 return _incoming(display, subreporecurse, ui, repo, source, opts)
543 550
544 551 def _outgoing(ui, repo, dest, opts):
545 552 dest = ui.expandpath(dest or 'default-push', dest or 'default')
546 553 dest, branches = parseurl(dest, opts.get('branch'))
547 554 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
548 555 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
549 556 if revs:
550 557 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
551 558
552 559 other = peer(repo, opts, dest)
553 560 outgoing = discovery.findcommonoutgoing(repo, other, revs,
554 561 force=opts.get('force'))
555 562 o = outgoing.missing
556 563 if not o:
557 564 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
558 565 return None
559 566 return o
560 567
561 568 def outgoing(ui, repo, dest, opts):
562 569 def recurse():
563 570 ret = 1
564 571 if opts.get('subrepos'):
565 572 ctx = repo[None]
566 573 for subpath in sorted(ctx.substate):
567 574 sub = ctx.sub(subpath)
568 575 ret = min(ret, sub.outgoing(ui, dest, opts))
569 576 return ret
570 577
571 578 limit = cmdutil.loglimit(opts)
572 579 o = _outgoing(ui, repo, dest, opts)
573 580 if o is None:
574 581 return recurse()
575 582
576 583 if opts.get('newest_first'):
577 584 o.reverse()
578 585 displayer = cmdutil.show_changeset(ui, repo, opts)
579 586 count = 0
580 587 for n in o:
581 588 if limit is not None and count >= limit:
582 589 break
583 590 parents = [p for p in repo.changelog.parents(n) if p != nullid]
584 591 if opts.get('no_merges') and len(parents) == 2:
585 592 continue
586 593 count += 1
587 594 displayer.show(repo[n])
588 595 displayer.close()
589 596 recurse()
590 597 return 0 # exit code is zero since we found outgoing changes
591 598
592 599 def revert(repo, node, choose):
593 600 """revert changes to revision in node without updating dirstate"""
594 601 return mergemod.update(repo, node, False, True, choose)[3] > 0
595 602
596 603 def verify(repo):
597 604 """verify the consistency of a repository"""
598 605 return verifymod.verify(repo)
599 606
600 607 def remoteui(src, opts):
601 608 'build a remote ui from ui or repo and opts'
602 609 if util.safehasattr(src, 'baseui'): # looks like a repository
603 610 dst = src.baseui.copy() # drop repo-specific config
604 611 src = src.ui # copy target options from repo
605 612 else: # assume it's a global ui object
606 613 dst = src.copy() # keep all global options
607 614
608 615 # copy ssh-specific options
609 616 for o in 'ssh', 'remotecmd':
610 617 v = opts.get(o) or src.config('ui', o)
611 618 if v:
612 619 dst.setconfig("ui", o, v)
613 620
614 621 # copy bundle-specific options
615 622 r = src.config('bundle', 'mainreporoot')
616 623 if r:
617 624 dst.setconfig('bundle', 'mainreporoot', r)
618 625
619 626 # copy selected local settings to the remote ui
620 627 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
621 628 for key, val in src.configitems(sect):
622 629 dst.setconfig(sect, key, val)
623 630 v = src.config('web', 'cacerts')
624 631 if v:
625 632 dst.setconfig('web', 'cacerts', util.expandpath(v))
626 633
627 634 return dst
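A note on how the new `openpath` decides locality: `islocal` goes through `_peerlookup`, whose `schemes` table mixes plain modules (`httppeer`, `sshpeer`, ...) with callables such as `_local` that choose a module per path. Calling a module object raises TypeError, which `_peerlookup` catches in order to return the module itself. A minimal sketch of that dispatch pattern, with generic names standing in for the real table:

```python
def lookup(table, scheme, path):
    # Entries are either a handler object (module-like) or a factory
    # that inspects the path and returns a handler.
    thing = table.get(scheme) or table['file']
    try:
        return thing(path)  # factory: let it pick based on the path
    except TypeError:       # a module isn't callable; use it directly
        return thing
```

In the real table, `file` maps to `_local`, which returns `bundlerepo` for bundle files on disk and `localrepo` otherwise; `openpath` then only needs `islocal`'s verdict to choose between the builtin `open` and `url.open`.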