url: use open and not url.open for local files (issue3624)
Siddharth Agarwal
r17887:0e2846b2 stable
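
The fix: synthrepo's synthesize command was opening its model description
with url.open(), which routes every path through Mercurial's HTTP machinery
and mishandles plain local files. This commit adds hg.openpath(), which
dispatches on the path type, and switches synthrepo to it. A minimal sketch
of the resulting behavior (Python 2, matching Mercurial of this era; the ui
setup and file names are illustrative, not part of the commit):

    from mercurial import hg, ui as uimod

    ui = uimod.ui()

    # Local path: openpath() falls back to the builtin open(), which is
    # what url.open() got wrong for plain files (issue3624).
    fp = hg.openpath(ui, 'repo-model.json')

    # Remote URL: openpath() still delegates to url.open().
    fp = hg.openpath(ui, 'http://example.com/repo-model.json')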


synthrepo.py
@@ -1,377 +1,377 @@
 # synthrepo.py - repo synthesis
 #
 # Copyright 2012 Facebook
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 '''synthesize structurally interesting change history

 This extension is useful for creating a repository with properties
 that are statistically similar to an existing repository. During
 analysis, a simple probability table is constructed from the history
 of an existing repository. During synthesis, these properties are
 reconstructed.

 Properties that are analyzed and synthesized include the following:

 - Lines added or removed when an existing file is modified
 - Number and sizes of files added
 - Number of files removed
 - Line lengths
 - Topological distance to parent changeset(s)
 - Probability of a commit being a merge
 - Probability of a newly added file being added to a new directory
 - Interarrival time, and time zone, of commits

 A few obvious properties that are not currently handled realistically:

 - Merges are treated as regular commits with two parents, which is not
   realistic
 - Modifications are not treated as operations on hunks of lines, but
   as insertions and deletions of randomly chosen single lines
 - Committer ID (always random)
 - Executability of files
 - Symlinks and binary files are ignored
 '''

 import bisect, collections, json, os, random, time
-from mercurial import cmdutil, context, patch, scmutil, url, util
+from mercurial import cmdutil, context, patch, scmutil, url, util, hg
 from mercurial.i18n import _
 from mercurial.node import nullrev, nullid

 testedwith = 'internal'

 cmdtable = {}
 command = cmdutil.command(cmdtable)

 newfile = set(('new fi', 'rename', 'copy f', 'copy t'))

 def zerodict():
     return collections.defaultdict(lambda: 0)

 def roundto(x, k):
     if x > k * 2:
         return int(round(x / float(k)) * k)
     return int(round(x))

 def parsegitdiff(lines):
     filename, mar, lineadd, lineremove = None, None, zerodict(), 0
     binary = False
     for line in lines:
         start = line[:6]
         if start == 'diff -':
             if filename:
                 yield filename, mar, lineadd, lineremove, binary
             mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
             filename = patch.gitre.match(line).group(1)
         elif start in newfile:
             mar = 'a'
         elif start == 'GIT bi':
             binary = True
         elif start == 'delete':
             mar = 'r'
         elif start:
             s = start[0]
             if s == '-' and not line.startswith('--- '):
                 lineremove += 1
             elif s == '+' and not line.startswith('+++ '):
                 lineadd[roundto(len(line) - 1, 5)] += 1
     if filename:
         yield filename, mar, lineadd, lineremove, binary

 @command('analyze',
          [('o', 'output', [], _('write output to given file'), _('FILE')),
           ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
          _('hg analyze'))
 def analyze(ui, repo, *revs, **opts):
     '''create a simple model of a repository to use for later synthesis

     This command examines every changeset in the given range (or all
     of history if none are specified) and creates a simple statistical
     model of the history of the repository.

     The model is written out to a JSON file, and can be used by
     :hg:`synthesize` to create or augment a repository with synthetic
     commits that have a structure that is statistically similar to the
     analyzed repository.
     '''

     revs = list(revs)
     revs.extend(opts['rev'])
     if not revs:
         revs = [':']

     output = opts['output']
     if not output:
         output = os.path.basename(repo.root) + '.json'

     if output == '-':
         fp = sys.stdout
     else:
         fp = open(output, 'w')

     revs = scmutil.revrange(repo, revs)
     revs.sort()

     lineschanged = zerodict()
     children = zerodict()
     p1distance = zerodict()
     p2distance = zerodict()
     linesinfilesadded = zerodict()
     fileschanged = zerodict()
     filesadded = zerodict()
     filesremoved = zerodict()
     linelengths = zerodict()
     interarrival = zerodict()
     parents = zerodict()
     dirsadded = zerodict()
     tzoffset = zerodict()

     progress = ui.progress
     _analyzing = _('analyzing')
     _changesets = _('changesets')
     _total = len(revs)

     for i, rev in enumerate(revs):
         progress(_analyzing, i, unit=_changesets, total=_total)
         ctx = repo[rev]
         pl = ctx.parents()
         pctx = pl[0]
         prev = pctx.rev()
         children[prev] += 1
         p1distance[rev - prev] += 1
         parents[len(pl)] += 1
         tzoffset[ctx.date()[1]] += 1
         if len(pl) > 1:
             p2distance[rev - pl[1].rev()] += 1
         if prev == rev - 1:
             lastctx = pctx
         else:
             lastctx = repo[rev - 1]
         if lastctx.rev() != nullrev:
             interarrival[roundto(ctx.date()[0] - lastctx.date()[0], 300)] += 1
         diff = sum((d.splitlines()
                     for d in ctx.diff(pctx, opts=dict(git=True))), [])
         fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
         for filename, mar, lineadd, lineremove, binary in parsegitdiff(diff):
             if binary:
                 continue
             added = sum(lineadd.itervalues(), 0)
             if mar == 'm':
                 if added and lineremove:
                     lineschanged[roundto(added, 5), roundto(lineremove, 5)] += 1
                     filechanges += 1
             elif mar == 'a':
                 fileadds += 1
                 if '/' in filename:
                     filedir = filename.rsplit('/', 1)[0]
                     if filedir not in pctx.dirs():
                         diradds += 1
                 linesinfilesadded[roundto(added, 5)] += 1
             elif mar == 'r':
                 fileremoves += 1
             for length, count in lineadd.iteritems():
                 linelengths[length] += count
         fileschanged[filechanges] += 1
         filesadded[fileadds] += 1
         dirsadded[diradds] += 1
         filesremoved[fileremoves] += 1

     invchildren = zerodict()

     for rev, count in children.iteritems():
         invchildren[count] += 1

     if output != '-':
         ui.status(_('writing output to %s\n') % output)

     def pronk(d):
         return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)

     json.dump(dict(revs=len(revs),
                    lineschanged=pronk(lineschanged),
                    children=pronk(invchildren),
                    fileschanged=pronk(fileschanged),
                    filesadded=pronk(filesadded),
                    linesinfilesadded=pronk(linesinfilesadded),
                    dirsadded=pronk(dirsadded),
                    filesremoved=pronk(filesremoved),
                    linelengths=pronk(linelengths),
                    parents=pronk(parents),
                    p1distance=pronk(p1distance),
                    p2distance=pronk(p2distance),
                    interarrival=pronk(interarrival),
                    tzoffset=pronk(tzoffset),
                    ),
               fp)
     fp.close()

 @command('synthesize',
          [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
           ('', 'dict', '', _('path to a dictionary of words'), _('FILE'))],
          _('hg synthesize [OPTION].. DESCFILE'))
 def synthesize(ui, repo, descpath, **opts):
     '''synthesize commits based on a model of an existing repository

     The model must have been generated by :hg:`analyze`. Commits will
     be generated randomly according to the probabilities described in
     the model.

     When synthesizing new content, commit descriptions, and user
     names, words will be chosen randomly from a dictionary that is
     presumed to contain one word per line. Use --dict to specify the
     path to an alternate dictionary to use.
     '''
     try:
-        fp = url.open(ui, descpath)
+        fp = hg.openpath(ui, descpath)
     except Exception, err:
         raise util.Abort('%s: %s' % (descpath, err[0].strerror))
     desc = json.load(fp)
     fp.close()

     def cdf(l):
         vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
         t = float(sum(probs, 0))
         s, cdfs = 0, []
         for v in probs:
             s += v
             cdfs.append(s / t)
         return vals, cdfs

     lineschanged = cdf(desc['lineschanged'])
     fileschanged = cdf(desc['fileschanged'])
     filesadded = cdf(desc['filesadded'])
     dirsadded = cdf(desc['dirsadded'])
     filesremoved = cdf(desc['filesremoved'])
     linelengths = cdf(desc['linelengths'])
     parents = cdf(desc['parents'])
     p1distance = cdf(desc['p1distance'])
     p2distance = cdf(desc['p2distance'])
     interarrival = cdf(desc['interarrival'])
     linesinfilesadded = cdf(desc['linesinfilesadded'])
     tzoffset = cdf(desc['tzoffset'])

     dictfile = opts.get('dict') or '/usr/share/dict/words'
     try:
         fp = open(dictfile, 'rU')
     except IOError, err:
         raise util.Abort('%s: %s' % (dictfile, err.strerror))
     words = fp.read().splitlines()
     fp.close()

     def pick(cdf):
         return cdf[0][bisect.bisect_left(cdf[1], random.random())]

     def makeline(minimum=0):
         total = max(minimum, pick(linelengths))
         c, l = 0, []
         while c < total:
             w = random.choice(words)
             c += len(w) + 1
             l.append(w)
         return ' '.join(l)

     wlock = repo.wlock()
     lock = repo.lock()

     nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))

     progress = ui.progress
     _synthesizing = _('synthesizing')
     _changesets = _('changesets')

     count = int(opts['count'])
     heads = set(map(repo.changelog.rev, repo.heads()))
     for i in xrange(count):
         progress(_synthesizing, i, unit=_changesets, total=count)

         node = repo.changelog.node
         revs = len(repo)

         def pickhead(heads, distance):
             if heads:
                 lheads = sorted(heads)
                 rev = revs - min(pick(distance), revs)
                 if rev < lheads[-1]:
                     rev = lheads[bisect.bisect_left(lheads, rev)]
                 else:
                     rev = lheads[-1]
                 return rev, node(rev)
             return nullrev, nullid

         r1 = revs - min(pick(p1distance), revs)
         p1 = node(r1)

         # the number of heads will grow without bound if we use a pure
         # model, so artificially constrain their proliferation
         if pick(parents) == 2 or len(heads) > random.randint(1, 20):
             r2, p2 = pickhead(heads.difference([r1]), p2distance)
         else:
             r2, p2 = nullrev, nullid

         pl = [p1, p2]
         pctx = repo[r1]
         mf = pctx.manifest()
         mfk = mf.keys()
         changes = {}
         if mfk:
             for __ in xrange(pick(fileschanged)):
                 for __ in xrange(10):
                     fctx = pctx.filectx(random.choice(mfk))
                     path = fctx.path()
                     if not (path in nevertouch or fctx.isbinary() or
                             'l' in fctx.flags()):
                         break
                 lines = fctx.data().splitlines()
                 add, remove = pick(lineschanged)
                 for __ in xrange(remove):
                     if not lines:
                         break
                     del lines[random.randrange(0, len(lines))]
                 for __ in xrange(add):
                     lines.insert(random.randint(0, len(lines)), makeline())
                 path = fctx.path()
                 changes[path] = context.memfilectx(path,
                                                    '\n'.join(lines) + '\n')
             for __ in xrange(pick(filesremoved)):
                 path = random.choice(mfk)
                 for __ in xrange(10):
                     path = random.choice(mfk)
                     if path not in changes:
                         changes[path] = None
                         break
         if filesadded:
             dirs = list(pctx.dirs())
             dirs.append('')
         for __ in xrange(pick(filesadded)):
             path = [random.choice(dirs)]
             if pick(dirsadded):
                 path.append(random.choice(words))
             path.append(random.choice(words))
             path = '/'.join(filter(None, path))
             data = '\n'.join(makeline()
                              for __ in xrange(pick(linesinfilesadded))) + '\n'
             changes[path] = context.memfilectx(path, data)
         def filectxfn(repo, memctx, path):
             data = changes[path]
             if data is None:
                 raise IOError
             return data
         if not changes:
             continue
         if revs:
             date = repo['tip'].date()[0] + pick(interarrival)
         else:
             date = time.time() - (86400 * count)
         user = random.choice(words) + '@' + random.choice(words)
         mc = context.memctx(repo, pl, makeline(minimum=2),
                             sorted(changes.iterkeys()),
                             filectxfn, user, '%d %d' % (date, pick(tzoffset)))
         newnode = mc.commit()
         heads.add(repo.changelog.rev(newnode))
         heads.discard(r1)
         heads.discard(r2)

     lock.release()
     wlock.release()
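
The synthesize command above drives all of its random choices through the
cdf()/pick() pair: cdf() turns the (value, count) pairs stored in the model
into a cumulative distribution, and pick() samples from it with a bisect
search. A self-contained sketch of that sampling scheme, with made-up
counts (the real pairs come from the JSON model written by :hg:`analyze`):

    import bisect, random

    def cdf(l):
        # sort (value, count) pairs by count, then accumulate normalized sums
        vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
        t = float(sum(probs))
        s, cdfs = 0, []
        for v in probs:
            s += v
            cdfs.append(s / t)
        return vals, cdfs

    def pick(c):
        # first cumulative bucket that covers a uniform random draw
        return c[0][bisect.bisect_left(c[1], random.random())]

    # illustrative data: line length 10 seen 5 times, 40 seen 3 times, 80 once
    linelengths = cdf([(10, 5), (40, 3), (80, 1)])
    samples = [pick(linelengths) for __ in xrange(1000)]
    # roughly 5/9 of the samples are 10, 3/9 are 40, 1/9 are 80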
hg.py
@@ -1,627 +1,634 @@
 # hg.py - repository classes for mercurial
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from i18n import _
 from lock import release
 from node import hex, nullid
 import localrepo, bundlerepo, httppeer, sshpeer, statichttprepo, bookmarks
-import lock, util, extensions, error, node, scmutil, phases
+import lock, util, extensions, error, node, scmutil, phases, url
 import cmdutil, discovery
 import merge as mergemod
 import verify as verifymod
 import errno, os, shutil

 def _local(path):
     path = util.expandpath(util.urllocalpath(path))
     return (os.path.isfile(path) and bundlerepo or localrepo)

 def addbranchrevs(lrepo, other, branches, revs):
     peer = other.peer() # a courtesy to callers using a localrepo for other
     hashbranch, branches = branches
     if not hashbranch and not branches:
         return revs or None, revs and revs[0] or None
     revs = revs and list(revs) or []
     if not peer.capable('branchmap'):
         if branches:
             raise util.Abort(_("remote branch lookup not supported"))
         revs.append(hashbranch)
         return revs, revs[0]
     branchmap = peer.branchmap()

     def primary(branch):
         if branch == '.':
             if not lrepo:
                 raise util.Abort(_("dirstate branch not accessible"))
             branch = lrepo.dirstate.branch()
         if branch in branchmap:
             revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
             return True
         else:
             return False

     for branch in branches:
         if not primary(branch):
             raise error.RepoLookupError(_("unknown branch '%s'") % branch)
     if hashbranch:
         if not primary(hashbranch):
             revs.append(hashbranch)
     return revs, revs[0]

 def parseurl(path, branches=None):
     '''parse url#branch, returning (url, (branch, branches))'''

     u = util.url(path)
     branch = None
     if u.fragment:
         branch = u.fragment
         u.fragment = None
     return str(u), (branch, branches or [])

 schemes = {
     'bundle': bundlerepo,
     'file': _local,
     'http': httppeer,
     'https': httppeer,
     'ssh': sshpeer,
     'static-http': statichttprepo,
 }

 def _peerlookup(path):
     u = util.url(path)
     scheme = u.scheme or 'file'
     thing = schemes.get(scheme) or schemes['file']
     try:
         return thing(path)
     except TypeError:
         return thing

 def islocal(repo):
     '''return true if repo or path is local'''
     if isinstance(repo, str):
         try:
             return _peerlookup(repo).islocal(repo)
         except AttributeError:
             return False
     return repo.local()

+def openpath(ui, path):
+    '''open path with open if local, url.open if remote'''
+    if islocal(path):
+        return open(util.urllocalpath(path))
+    else:
+        return url.open(ui, path)
+
 def _peerorrepo(ui, path, create=False):
     """return a repository object for the specified path"""
     obj = _peerlookup(path).instance(ui, path, create)
     ui = getattr(obj, "ui", ui)
     for name, module in extensions.extensions():
         hook = getattr(module, 'reposetup', None)
         if hook:
             hook(ui, obj)
     return obj

 def repository(ui, path='', create=False):
     """return a repository object for the specified path"""
     peer = _peerorrepo(ui, path, create)
     repo = peer.local()
     if not repo:
         raise util.Abort(_("repository '%s' is not local") %
                          (path or peer.url()))
     return repo

 def peer(uiorrepo, opts, path, create=False):
     '''return a repository peer for the specified path'''
     rui = remoteui(uiorrepo, opts)
     return _peerorrepo(rui, path, create).peer()

 def defaultdest(source):
     '''return default destination of clone if none is given'''
     return os.path.basename(os.path.normpath(util.url(source).path))

 def share(ui, source, dest=None, update=True):
     '''create a shared repository'''

     if not islocal(source):
         raise util.Abort(_('can only share local repositories'))

     if not dest:
         dest = defaultdest(source)
     else:
         dest = ui.expandpath(dest)

     if isinstance(source, str):
         origsource = ui.expandpath(source)
         source, branches = parseurl(origsource)
         srcrepo = repository(ui, source)
         rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
     else:
         srcrepo = source.local()
         origsource = source = srcrepo.url()
         checkout = None

     sharedpath = srcrepo.sharedpath # if our source is already sharing

     root = os.path.realpath(dest)
     roothg = os.path.join(root, '.hg')

     if os.path.exists(roothg):
         raise util.Abort(_('destination already exists'))

     if not os.path.isdir(root):
         os.mkdir(root)
     util.makedir(roothg, notindexed=True)

     requirements = ''
     try:
         requirements = srcrepo.opener.read('requires')
     except IOError, inst:
         if inst.errno != errno.ENOENT:
             raise

     requirements += 'shared\n'
     util.writefile(os.path.join(roothg, 'requires'), requirements)
     util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)

     r = repository(ui, root)

     default = srcrepo.ui.config('paths', 'default')
     if default:
         fp = r.opener("hgrc", "w", text=True)
         fp.write("[paths]\n")
         fp.write("default = %s\n" % default)
         fp.close()

     if update:
         r.ui.status(_("updating working directory\n"))
         if update is not True:
             checkout = update
         for test in (checkout, 'default', 'tip'):
             if test is None:
                 continue
             try:
                 uprev = r.lookup(test)
                 break
             except error.RepoLookupError:
                 continue
         _update(r, uprev)

 def copystore(ui, srcrepo, destpath):
     '''copy files from store of srcrepo in destpath

     returns destlock
     '''
     destlock = None
     try:
         hardlink = None
         num = 0
         srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
         for f in srcrepo.store.copylist():
             if srcpublishing and f.endswith('phaseroots'):
                 continue
             src = os.path.join(srcrepo.sharedpath, f)
             dst = os.path.join(destpath, f)
             dstbase = os.path.dirname(dst)
             if dstbase and not os.path.exists(dstbase):
                 os.mkdir(dstbase)
             if os.path.exists(src):
                 if dst.endswith('data'):
                     # lock to avoid premature writing to the target
                     destlock = lock.lock(os.path.join(dstbase, "lock"))
                 hardlink, n = util.copyfiles(src, dst, hardlink)
                 num += n
         if hardlink:
             ui.debug("linked %d files\n" % num)
         else:
             ui.debug("copied %d files\n" % num)
         return destlock
     except: # re-raises
         release(destlock)
         raise

 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
           update=True, stream=False, branch=None):
     """Make a copy of an existing repository.

     Create a copy of an existing repository in a new directory. The
     source and destination are URLs, as passed to the repository
     function. Returns a pair of repository peers, the source and
     newly created destination.

     The location of the source is added to the new repository's
     .hg/hgrc file, as the default to be used for future pulls and
     pushes.

     If an exception is raised, the partly cloned/updated destination
     repository will be deleted.

     Arguments:

     source: repository object or URL

     dest: URL of destination repository to create (defaults to base
     name of source repository)

     pull: always pull from source repository, even in local case

     stream: stream raw data uncompressed from repository (fast over
     LAN, slow over WAN)

     rev: revision to clone up to (implies pull=True)

     update: update working directory after clone completes, if
     destination is local repository (True means update to default rev,
     anything else is treated as a revision)

     branch: branches to clone
     """

     if isinstance(source, str):
         origsource = ui.expandpath(source)
         source, branch = parseurl(origsource, branch)
         srcpeer = peer(ui, peeropts, source)
     else:
         srcpeer = source.peer() # in case we were called with a localrepo
         branch = (None, branch or [])
         origsource = source = srcpeer.url()
     rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)

     if dest is None:
         dest = defaultdest(source)
         ui.status(_("destination directory: %s\n") % dest)
     else:
         dest = ui.expandpath(dest)

     dest = util.urllocalpath(dest)
     source = util.urllocalpath(source)

     if not dest:
         raise util.Abort(_("empty destination path is not valid"))
     if os.path.exists(dest):
         if not os.path.isdir(dest):
             raise util.Abort(_("destination '%s' already exists") % dest)
         elif os.listdir(dest):
             raise util.Abort(_("destination '%s' is not empty") % dest)

     class DirCleanup(object):
         def __init__(self, dir_):
             self.rmtree = shutil.rmtree
             self.dir_ = dir_
         def close(self):
             self.dir_ = None
         def cleanup(self):
             if self.dir_:
                 self.rmtree(self.dir_, True)

     srclock = destlock = dircleanup = None
     srcrepo = srcpeer.local()
     try:
         abspath = origsource
         if islocal(origsource):
             abspath = os.path.abspath(util.urllocalpath(origsource))

         if islocal(dest):
             dircleanup = DirCleanup(dest)

         copy = False
         if (srcrepo and srcrepo.cancopy() and islocal(dest)
             and not phases.hassecret(srcrepo)):
             copy = not pull and not rev

         if copy:
             try:
                 # we use a lock here because if we race with commit, we
                 # can end up with extra data in the cloned revlogs that's
                 # not pointed to by changesets, thus causing verify to
                 # fail
                 srclock = srcrepo.lock(wait=False)
             except error.LockError:
                 copy = False

         if copy:
             srcrepo.hook('preoutgoing', throw=True, source='clone')
             hgdir = os.path.realpath(os.path.join(dest, ".hg"))
             if not os.path.exists(dest):
                 os.mkdir(dest)
             else:
                 # only clean up directories we create ourselves
                 dircleanup.dir_ = hgdir
             try:
                 destpath = hgdir
                 util.makedir(destpath, notindexed=True)
             except OSError, inst:
                 if inst.errno == errno.EEXIST:
                     dircleanup.close()
                     raise util.Abort(_("destination '%s' already exists")
                                      % dest)
                 raise

             destlock = copystore(ui, srcrepo, destpath)

             # Recomputing branch cache might be slow on big repos,
             # so just copy it
             dstcachedir = os.path.join(destpath, 'cache')
             srcbranchcache = srcrepo.sjoin('cache/branchheads')
             dstbranchcache = os.path.join(dstcachedir, 'branchheads')
             if os.path.exists(srcbranchcache):
                 if not os.path.exists(dstcachedir):
                     os.mkdir(dstcachedir)
                 util.copyfile(srcbranchcache, dstbranchcache)

             # we need to re-init the repo after manually copying the data
             # into it
             destpeer = peer(srcrepo, peeropts, dest)
             srcrepo.hook('outgoing', source='clone',
                          node=node.hex(node.nullid))
         else:
             try:
                 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
                 # only pass ui when no srcrepo
             except OSError, inst:
                 if inst.errno == errno.EEXIST:
                     dircleanup.close()
                     raise util.Abort(_("destination '%s' already exists")
                                      % dest)
                 raise

         revs = None
         if rev:
             if not srcpeer.capable('lookup'):
                 raise util.Abort(_("src repository does not support "
                                    "revision lookup and so doesn't "
                                    "support clone by revision"))
             revs = [srcpeer.lookup(r) for r in rev]
             checkout = revs[0]
         if destpeer.local():
             destpeer.local().clone(srcpeer, heads=revs, stream=stream)
         elif srcrepo:
             srcrepo.push(destpeer, revs=revs)
         else:
             raise util.Abort(_("clone from remote to remote not supported"))

         if dircleanup:
             dircleanup.close()

         # clone all bookmarks except divergent ones
         destrepo = destpeer.local()
         if destrepo and srcpeer.capable("pushkey"):
             rb = srcpeer.listkeys('bookmarks')
             for k, n in rb.iteritems():
                 try:
                     m = destrepo.lookup(n)
                     destrepo._bookmarks[k] = m
                 except error.RepoLookupError:
                     pass
             if rb:
                 bookmarks.write(destrepo)
         elif srcrepo and destpeer.capable("pushkey"):
             for k, n in srcrepo._bookmarks.iteritems():
                 destpeer.pushkey('bookmarks', k, '', hex(n))

         if destrepo:
             fp = destrepo.opener("hgrc", "w", text=True)
             fp.write("[paths]\n")
             u = util.url(abspath)
             u.passwd = None
             defaulturl = str(u)
             fp.write("default = %s\n" % defaulturl)
             fp.close()

             destrepo.ui.setconfig('paths', 'default', defaulturl)

             if update:
                 if update is not True:
                     checkout = srcpeer.lookup(update)
                 uprev = None
                 status = None
                 if checkout is not None:
                     try:
                         uprev = destrepo.lookup(checkout)
                     except error.RepoLookupError:
                         pass
                 if uprev is None:
                     try:
                         uprev = destrepo._bookmarks['@']
                         update = '@'
                         bn = destrepo[uprev].branch()
                         if bn == 'default':
                             status = _("updating to bookmark @\n")
                         else:
                             status = _("updating to bookmark @ on branch %s\n"
                                        % bn)
                     except KeyError:
                         try:
                             uprev = destrepo.branchtip('default')
                         except error.RepoLookupError:
                             uprev = destrepo.lookup('tip')
                 if not status:
                     bn = destrepo[uprev].branch()
                     status = _("updating to branch %s\n") % bn
                 destrepo.ui.status(status)
                 _update(destrepo, uprev)
                 if update in destrepo._bookmarks:
                     bookmarks.setcurrent(destrepo, update)

         return srcpeer, destpeer
     finally:
         release(srclock, destlock)
         if dircleanup is not None:
             dircleanup.cleanup()
         if srcpeer is not None:
             srcpeer.close()

 def _showstats(repo, stats):
     repo.ui.status(_("%d files updated, %d files merged, "
                      "%d files removed, %d files unresolved\n") % stats)

 def update(repo, node):
     """update the working directory to node, merging linear changes"""
     stats = mergemod.update(repo, node, False, False, None)
     _showstats(repo, stats)
     if stats[3]:
         repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
     return stats[3] > 0

 # naming conflict in clone()
 _update = update

 def clean(repo, node, show_stats=True):
     """forcibly switch the working directory to node, clobbering changes"""
     stats = mergemod.update(repo, node, False, True, None)
     if show_stats:
         _showstats(repo, stats)
     return stats[3] > 0

 def merge(repo, node, force=None, remind=True):
     """Branch merge with node, resolving changes. Return true if any
     unresolved conflicts."""
     stats = mergemod.update(repo, node, True, force, False)
     _showstats(repo, stats)
     if stats[3]:
         repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                          "or 'hg update -C .' to abandon\n"))
     elif remind:
         repo.ui.status(_("(branch merge, don't forget to commit)\n"))
     return stats[3] > 0

 def _incoming(displaychlist, subreporecurse, ui, repo, source,
         opts, buffered=False):
     """
     Helper for incoming / gincoming.
     displaychlist gets called with
         (remoterepo, incomingchangesetlist, displayer) parameters,
     and is supposed to contain only code that can't be unified.
     """
     source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
     other = peer(repo, opts, source)
     ui.status(_('comparing with %s\n') % util.hidepassword(source))
     revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))

     if revs:
         revs = [other.lookup(rev) for rev in revs]
     other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
                                 revs, opts["bundle"], opts["force"])
     try:
         if not chlist:
             ui.status(_("no changes found\n"))
             return subreporecurse()

         displayer = cmdutil.show_changeset(ui, other, opts, buffered)

         # XXX once graphlog extension makes it into core,
         # should be replaced by a if graph/else
         displaychlist(other, chlist, displayer)

         displayer.close()
     finally:
         cleanupfn()
     subreporecurse()
     return 0 # exit code is zero since we found incoming changes

 def incoming(ui, repo, source, opts):
     def subreporecurse():
         ret = 1
         if opts.get('subrepos'):
             ctx = repo[None]
             for subpath in sorted(ctx.substate):
                 sub = ctx.sub(subpath)
                 ret = min(ret, sub.incoming(ui, source, opts))
         return ret

     def display(other, chlist, displayer):
         limit = cmdutil.loglimit(opts)
         if opts.get('newest_first'):
             chlist.reverse()
         count = 0
         for n in chlist:
             if limit is not None and count >= limit:
                 break
             parents = [p for p in other.changelog.parents(n) if p != nullid]
             if opts.get('no_merges') and len(parents) == 2:
                 continue
             count += 1
             displayer.show(other[n])
     return _incoming(display, subreporecurse, ui, repo, source, opts)

 def _outgoing(ui, repo, dest, opts):
     dest = ui.expandpath(dest or 'default-push', dest or 'default')
     dest, branches = parseurl(dest, opts.get('branch'))
     ui.status(_('comparing with %s\n') % util.hidepassword(dest))
     revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
     if revs:
         revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]

     other = peer(repo, opts, dest)
     outgoing = discovery.findcommonoutgoing(repo, other, revs,
                                             force=opts.get('force'))
     o = outgoing.missing
     if not o:
         scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
         return None
     return o

 def outgoing(ui, repo, dest, opts):
     def recurse():
         ret = 1
         if opts.get('subrepos'):
             ctx = repo[None]
             for subpath in sorted(ctx.substate):
                 sub = ctx.sub(subpath)
                 ret = min(ret, sub.outgoing(ui, dest, opts))
         return ret

     limit = cmdutil.loglimit(opts)
     o = _outgoing(ui, repo, dest, opts)
     if o is None:
         return recurse()

     if opts.get('newest_first'):
         o.reverse()
     displayer = cmdutil.show_changeset(ui, repo, opts)
     count = 0
     for n in o:
         if limit is not None and count >= limit:
             break
         parents = [p for p in repo.changelog.parents(n) if p != nullid]
         if opts.get('no_merges') and len(parents) == 2:
             continue
         count += 1
         displayer.show(repo[n])
     displayer.close()
     recurse()
     return 0 # exit code is zero since we found outgoing changes

 def revert(repo, node, choose):
     """revert changes to revision in node without updating dirstate"""
     return mergemod.update(repo, node, False, True, choose)[3] > 0

 def verify(repo):
     """verify the consistency of a repository"""
     return verifymod.verify(repo)

 def remoteui(src, opts):
     'build a remote ui from ui or repo and opts'
     if util.safehasattr(src, 'baseui'): # looks like a repository
         dst = src.baseui.copy() # drop repo-specific config
         src = src.ui # copy target options from repo
     else: # assume it's a global ui object
         dst = src.copy() # keep all global options

     # copy ssh-specific options
     for o in 'ssh', 'remotecmd':
         v = opts.get(o) or src.config('ui', o)
         if v:
             dst.setconfig("ui", o, v)

     # copy bundle-specific options
     r = src.config('bundle', 'mainreporoot')
     if r:
         dst.setconfig('bundle', 'mainreporoot', r)

     # copy selected local settings to the remote ui
     for sect in ('auth', 'hostfingerprints', 'http_proxy'):
         for key, val in src.configitems(sect):
             dst.setconfig(sect, key, val)
     v = src.config('web', 'cacerts')
     if v:
         dst.setconfig('web', 'cacerts', util.expandpath(v))

     return dst
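
For completeness, clone() is the usual entry point into this module from the
command layer. A hedged sketch of calling it directly, with an invented
source URL and destination (peeropts is left empty here; real callers pass
ssh/remotecmd-style options, see remoteui() above):

    from mercurial import hg, ui as uimod

    ui = uimod.ui()
    srcpeer, destpeer = hg.clone(ui, {}, 'http://example.com/repo',
                                 dest='repo-copy', update=True)
    # On success the source location is written to repo-copy/.hg/hgrc as the
    # default path, and the working directory is updated per the docstring.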